diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index dd98abbd..a934f1e0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 # created: 2023-10-09T14:06:13.397766266Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 0332d326..16170d0c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine diff --git a/docs/compute_v1/services.rst b/docs/compute_v1/services_.rst similarity index 100% rename from docs/compute_v1/services.rst rename to docs/compute_v1/services_.rst diff --git a/docs/compute_v1/types.rst b/docs/compute_v1/types_.rst similarity index 100% rename from docs/compute_v1/types.rst rename to docs/compute_v1/types_.rst diff --git a/docs/index.rst b/docs/index.rst index dbd139ec..b5b5d42b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,8 +8,8 @@ API Reference .. toctree:: :maxdepth: 2 - compute_v1/services - compute_v1/types + compute_v1/services_ + compute_v1/types_ Changelog diff --git a/tests/unit/gapic/compute_v1/test_accelerator_types.py b/tests/unit/gapic/compute_v1/test_accelerator_types.py index 03a2ca92..fc06105f 100644 --- a/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -602,8 +602,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -693,8 +694,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -831,8 +833,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -977,8 +980,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1066,8 +1070,9 @@ def test_get_rest_required_fields(request_type=compute.GetAcceleratorTypeRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1216,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1282,8 +1288,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1373,8 +1380,9 @@ def test_list_rest_required_fields(request_type=compute.ListAcceleratorTypesRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1516,8 +1524,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AcceleratorTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_addresses.py b/tests/unit/gapic/compute_v1/test_addresses.py index 3fb46755..4389a680 100644 --- a/tests/unit/gapic/compute_v1/test_addresses.py +++ b/tests/unit/gapic/compute_v1/test_addresses.py @@ -576,8 +576,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -667,8 +668,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -803,8 +805,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -954,8 +957,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1059,8 +1063,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteAddressRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1194,8 +1199,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value 
@@ -1283,8 +1289,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1366,8 +1373,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteAddressReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1501,8 +1509,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1587,8 +1596,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1687,8 +1697,9 @@ def test_get_rest_required_fields(request_type=compute.GetAddressRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1822,8 +1833,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1900,6 +1912,73 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "users": ["users_value1", "users_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertAddressRequest.meta.fields["address_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["address_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["address_resource"][field])): + del request_init["address_resource"][field][i][subfield] + else: + del request_init["address_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
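The get_message_fields helper added here keys off whether a field's message type exposes a DESCRIPTOR attribute (raw protobuf) or a meta.fields mapping (proto-plus). A hedged standalone sketch of that check, reusing the same address_resource field the test introspects:

# Illustrative sketch: list the nested field names the installed runtime knows,
# using the same proto-plus/protobuf distinction as the generated helper.
from google.cloud.compute_v1.types import compute

field = compute.InsertAddressRequest.meta.fields["address_resource"]
if hasattr(field.message, "DESCRIPTOR"):  # raw protobuf message class
    runtime_names = [f.name for f in field.message.DESCRIPTOR.fields]
else:  # proto-plus wrapper class
    runtime_names = list(field.message.meta.fields.keys())
print(sorted(runtime_names)[:5])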
@@ -1933,8 +2012,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2035,8 +2115,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertAddressRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2127,28 +2208,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["address_resource"] = { - "address": "address_value", - "address_type": "address_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "ip_version": "ip_version_value", - "ipv6_endpoint_type": "ipv6_endpoint_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "prefix_length": 1391, - "purpose": "purpose_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "subnetwork": "subnetwork_value", - "users": ["users_value1", "users_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2188,8 +2247,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2266,6 +2326,73 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "users": ["users_value1", "users_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertAddressRequest.meta.fields["address_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["address_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["address_resource"][field])): + del request_init["address_resource"][field][i][subfield] + else: + del request_init["address_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
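Condensed, the cleanup that follows boils down to deleting any (field, subfield) pair the runtime descriptors do not list. A self-contained toy version, assuming an empty runtime_nested_fields purely for illustration:

# Illustrative sketch of the pruning step on a made-up address_resource dict.
request_init = {"address_resource": {"labels": {"stale_key": "v"}, "users": ["u1", "u2"]}}
runtime_nested_fields = []  # pretend the installed proto exposes no nested fields

for field, value in request_init["address_resource"].items():
    result = value[0] if isinstance(value, list) and value else value
    if isinstance(result, dict):
        for subfield in list(result.keys()):
            if (field, subfield) not in runtime_nested_fields:
                del result[subfield]

print(request_init)  # {'address_resource': {'labels': {}, 'users': ['u1', 'u2']}}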
@@ -2299,8 +2426,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2379,8 +2507,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertAddressReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2471,28 +2600,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["address_resource"] = { - "address": "address_value", - "address_type": "address_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "ip_version": "ip_version_value", - "ipv6_endpoint_type": "ipv6_endpoint_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "prefix_length": 1391, - "purpose": "purpose_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "subnetwork": "subnetwork_value", - "users": ["users_value1", "users_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
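The *_rest_bad_request tests above drop their hand-written resource dicts because they only exercise the error path: the mocked transport returns a 400 before the request body matters. A rough sketch of that shape (assumed, not copied from the generated test; the client and request come from elsewhere in the suite):

# Illustrative sketch, assuming the test patches requests.sessions.Session.request.
from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import Request, Response
from requests.sessions import Session


def assert_insert_unary_bad_request(client, request):
    # `client` is an AddressesClient with a REST transport; `request` is an
    # InsertAddressRequest built without the stale sample fields.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request=request)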
@@ -2532,8 +2639,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2603,8 +2711,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2694,8 +2803,9 @@ def test_list_rest_required_fields(request_type=compute.ListAddressesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2831,8 +2941,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2945,6 +3056,84 @@ def test_move_rest(request_type): "description": "description_value", "destination_address": "destination_address_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveAddressRequest.meta.fields[ + "region_addresses_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_addresses_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["region_addresses_move_request_resource"][field]), + ): + del request_init["region_addresses_move_request_resource"][field][ + i + ][subfield] + else: + del request_init["region_addresses_move_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
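When a stale subfield lives inside a repeated message field, the deletion loop above has to walk every element of the list rather than a single dict. A toy illustration of that branch, with hypothetical field names:

# Illustrative sketch of the repeated-field branch of the cleanup loop.
request_init = {
    "resource": {
        "items": [
            {"message": "m1", "stale": 1},
            {"message": "m2", "stale": 2},
        ]
    }
}
field, subfield = "items", "stale"
for i in range(0, len(request_init["resource"][field])):
    del request_init["resource"][field][i][subfield]

print(request_init)
# {'resource': {'items': [{'message': 'm1'}, {'message': 'm2'}]}}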
@@ -2978,8 +3167,9 @@ def test_move_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3084,8 +3274,9 @@ def test_move_rest_required_fields(request_type=compute.MoveAddressRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3177,10 +3368,6 @@ def test_move_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} - request_init["region_addresses_move_request_resource"] = { - "description": "description_value", - "destination_address": "destination_address_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3227,8 +3414,9 @@ def test_move_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3290,6 +3478,84 @@ def test_move_unary_rest(request_type): "description": "description_value", "destination_address": "destination_address_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveAddressRequest.meta.fields[ + "region_addresses_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_addresses_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["region_addresses_move_request_resource"][field]), + ): + del request_init["region_addresses_move_request_resource"][field][ + i + ][subfield] + else: + del request_init["region_addresses_move_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
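The scrubbing exists because request_type(**request_init) rejects keys that the installed protobuf definitions do not know about (the situation described in gapic-generator-python issue 1748). A hedged sketch of the failure it prevents; field_added_in_newer_proto is a made-up name:

# Illustrative sketch: an unknown nested field fails at construction time,
# which is exactly what the pruning above guards against.
from google.cloud.compute_v1.types import compute

try:
    compute.MoveAddressRequest(
        project="p",
        region="r",
        address="a",
        region_addresses_move_request_resource={
            "description": "d",
            "field_added_in_newer_proto": "x",  # hypothetical stale field
        },
    )
except (TypeError, ValueError) as exc:
    print(f"rejected as expected: {exc}")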
@@ -3323,8 +3589,9 @@ def test_move_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3407,8 +3674,9 @@ def test_move_unary_rest_required_fields(request_type=compute.MoveAddressRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3500,10 +3768,6 @@ def test_move_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} - request_init["region_addresses_move_request_resource"] = { - "description": "description_value", - "destination_address": "destination_address_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3550,8 +3814,9 @@ def test_move_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3613,6 +3878,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsAddressRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
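Every happy-path test here fakes the HTTP layer the same way: a bare requests.Response with the serialized message stuffed into its private _content attribute. A minimal standalone version of that scaffolding:

# Illustrative sketch of the mocked REST response used throughout these tests.
from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = '{"name": "operation-1"}'.encode("UTF-8")
assert response_value.json() == {"name": "operation-1"}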
@@ -3646,8 +3986,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3752,8 +4093,9 @@ def test_set_labels_rest_required_fields(request_type=compute.SetLabelsAddressRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3847,10 +4189,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3897,8 +4235,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3960,6 +4299,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsAddressRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3993,8 +4407,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4079,8 +4494,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4174,10 +4590,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4224,8 +4636,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_autoscalers.py b/tests/unit/gapic/compute_v1/test_autoscalers.py index cc90b52e..3506fec8 100644 --- a/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AutoscalerAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AutoscalerAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = compute.AutoscalerAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -963,8 +966,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteAutoscalerReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1207,8 +1212,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1296,8 +1302,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1381,8 +1388,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1520,8 +1528,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -1598,8 +1607,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1690,8 +1700,9 @@ def test_get_rest_required_fields(request_type=compute.GetAutoscalerRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1827,8 +1838,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1927,6 +1939,73 @@ def test_insert_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
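The sample bodies stay as plain dicts because proto-plus coerces nested dicts into the matching message types on construction; only keys unknown to the runtime (handled by the cleanup above) are a problem. A small sketch using field names that appear in the autoscaler sample:

# Illustrative sketch: nested dicts are coerced into proto-plus message types.
from google.cloud.compute_v1.types import compute

autoscaler = compute.Autoscaler(
    name="name_value",
    autoscaling_policy={"max_num_replicas": 1703, "min_num_replicas": 1701},
)
assert autoscaler.autoscaling_policy.max_num_replicas == 1703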
@@ -1960,8 +2039,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2062,8 +2142,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertAutoscalerReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2158,50 +2239,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2243,8 +2280,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2345,6 +2383,73 @@ def test_insert_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
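The `get_message_fields` helper tells the two field flavours apart by probing for `DESCRIPTOR`: a raw `*_pb2` message class exposes it, while the proto-plus wrappers used by `google.cloud.compute_v1` do not and carry their schema under `.meta.fields` instead. A quick check of that distinction (a sketch assuming `google-cloud-compute` is installed and behaves as the generated helper expects):

from google.cloud.compute_v1.types import compute

field = compute.InsertAutoscalerRequest.meta.fields["autoscaler_resource"]
assert not hasattr(field.message, "DESCRIPTOR")  # proto-plus wrapper class
assert "autoscaling_policy" in field.message.meta.fields  # schema lives here instead

pb_instance = compute.Autoscaler.pb(compute.Autoscaler())
assert hasattr(pb_instance, "DESCRIPTOR")  # the underlying protobuf message has it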
@@ -2378,8 +2483,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2460,8 +2566,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2556,50 +2663,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
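The hunk above that drops `request_init["autoscaler_resource"]` from `test_insert_unary_rest_bad_request` (and the matching deletions in the other `*_rest_bad_request` tests) is safe because those tests never depend on the payload: the mocked transport returns a 400 and the client raises before anything about the body is asserted, so the resource dict, which would otherwise need the same runtime pruning, is simply omitted. The surrounding pattern looks roughly like this (a sketch following the generated tests, not a verbatim excerpt; the helper name is hypothetical):

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import Request, Response
from requests.sessions import Session

def check_insert_unary_bad_request(client, request):
    # Any request that satisfies transcoding is enough; the body is never inspected.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request=request)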
@@ -2641,8 +2704,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2714,8 +2778,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2805,8 +2870,9 @@ def test_list_rest_required_fields(request_type=compute.ListAutoscalersRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2946,8 +3012,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3100,6 +3167,73 @@ def test_patch_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3133,8 +3267,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3240,8 +3375,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchAutoscalerRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3339,50 +3475,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3424,8 +3516,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3526,6 +3619,73 @@ def test_patch_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3559,8 +3719,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3644,8 +3805,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchAutoscalerRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3743,50 +3905,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3828,8 +3946,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3930,6 +4049,73 @@ def test_update_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
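One subtlety in the deletion pass above: for a repeated message field such as `status_details`, only the first element (`value[0]`) is inspected when deciding which subfields are unknown, but the delete loop then has to strip that key from every element, which is what the `is_repeated` flag is for. A toy run of just that branch:

# Toy data: "status_details" mirrors the repeated field in the sample request above,
# and "extra" stands in for a subfield the installed library does not know yet.
items = [{"message": "m1", "extra": 1}, {"message": "m2", "extra": 2}]
unknown_subfields = {"extra"}  # decided by looking at items[0] only

for i in range(0, len(items)):  # same element-wise delete as in the test
    for subfield in unknown_subfields:
        del items[i][subfield]

assert items == [{"message": "m1"}, {"message": "m2"}]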
@@ -3963,8 +4149,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4070,8 +4257,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateAutoscalerReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4171,50 +4359,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4256,8 +4400,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4358,6 +4503,73 @@ def test_update_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4391,8 +4603,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4478,8 +4691,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4579,50 +4793,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
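The recurring two-line rename in the response mocks throughout these files does not change behaviour: the proto-plus return value is still converted to its underlying protobuf message before serialization, the conversion now just reuses the `return_value` name (with an explanatory comment) instead of introducing `pb_return_value`. In isolation, the mocked-response setup these hunks touch is:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="name_value")  # proto-plus wrapper
# Convert return value to protobuf type
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")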
@@ -4664,8 +4834,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_backend_buckets.py b/tests/unit/gapic/compute_v1/test_backend_buckets.py index 8381604d..929d4e51 100644 --- a/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -577,6 +577,77 @@ def test_add_signed_url_key_rest(request_type): "key_name": "key_name_value", "key_value": "key_value_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddSignedUrlKeyBackendBucketRequest.meta.fields[ + "signed_url_key_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "signed_url_key_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["signed_url_key_resource"][field])): + del 
request_init["signed_url_key_resource"][field][i][subfield] + else: + del request_init["signed_url_key_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -610,8 +681,9 @@ def test_add_signed_url_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -714,8 +786,9 @@ def test_add_signed_url_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -810,10 +883,6 @@ def test_add_signed_url_key_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["signed_url_key_resource"] = { - "key_name": "key_name_value", - "key_value": "key_value_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -853,8 +922,9 @@ def test_add_signed_url_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -913,6 +983,77 @@ def test_add_signed_url_key_unary_rest(request_type): "key_name": "key_name_value", "key_value": "key_value_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddSignedUrlKeyBackendBucketRequest.meta.fields[ + "signed_url_key_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "signed_url_key_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["signed_url_key_resource"][field])): + del request_init["signed_url_key_resource"][field][i][subfield] + else: + del request_init["signed_url_key_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
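For `signed_url_key_resource` the pruning block is effectively a no-op: assuming `compute.SignedUrlKey` still has only the two string fields shown in the sample request, neither level of `get_message_fields` finds any nested message fields, so `runtime_nested_fields` comes out empty and nothing is ever deleted; the generator stamps the same boilerplate regardless. Reusing the helper defined in the test above:

test_field = compute.AddSignedUrlKeyBackendBucketRequest.meta.fields[
    "signed_url_key_resource"
]
subfields = get_message_fields(test_field)  # fields of compute.SignedUrlKey
assert {f.name for f in subfields} >= {"key_name", "key_value"}
assert all(get_message_fields(f) == [] for f in subfields)  # no nested messages
# Hence runtime_nested_fields == [] and the sample request passes through untouched.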
@@ -946,8 +1087,9 @@ def test_add_signed_url_key_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1028,8 +1170,9 @@ def test_add_signed_url_key_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1124,10 +1267,6 @@ def test_add_signed_url_key_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["signed_url_key_resource"] = { - "key_name": "key_name_value", - "key_value": "key_value_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1167,8 +1306,9 @@ def test_add_signed_url_key_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1256,8 +1396,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1357,8 +1498,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteBackendBucketReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1490,8 +1632,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1578,8 +1721,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1659,8 +1803,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1792,8 +1937,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1880,8 +2026,9 @@ def test_delete_signed_url_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1995,8 +2142,9 @@ def test_delete_signed_url_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2140,8 +2288,9 @@ def test_delete_signed_url_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2229,8 +2378,9 @@ def test_delete_signed_url_key_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2322,8 +2472,9 @@ def test_delete_signed_url_key_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2467,8 +2618,9 @@ def test_delete_signed_url_key_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2545,8 +2697,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2633,8 +2786,9 @@ def test_get_rest_required_fields(request_type=compute.GetBackendBucketRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2768,8 +2922,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2865,6 +3020,77 @@ def test_insert_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2898,8 +3124,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2996,8 +3223,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertBackendBucketReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3091,48 +3319,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3173,8 +3359,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3272,6 +3459,77 @@ def test_insert_unary_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
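For `backend_bucket_resource`, by contrast, the comprehension does real work: `cdn_policy` is the one message-typed field in the sample request, so `runtime_nested_fields` contains pairs like ("cdn_policy", "cache_mode") and ("cdn_policy", "client_ttl"), while scalar fields such as `bucket_name` contribute nothing because `get_message_fields` returns an empty list for them. Any subfield present in the sample dict but missing from that list is exactly what the loop above removes. A sketch, reusing the helper from the test and assuming the installed library knows `cache_mode`:

test_field = compute.InsertBackendBucketRequest.meta.fields["backend_bucket_resource"]
runtime_nested_fields = [
    (field.name, nested_field.name)
    for field in get_message_fields(test_field)  # fields of compute.BackendBucket
    for nested_field in get_message_fields(field)  # recurses only into message fields
]
assert ("cdn_policy", "cache_mode") in runtime_nested_fields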
@@ -3305,8 +3563,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3383,8 +3642,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3478,48 +3738,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
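The *_bad_request hunks above drop the large backend_bucket_resource sample dict. As I read it, the path parameters alone still satisfy transcoding, and these tests only exercise the error path, so omitting the body also avoids duplicating the runtime-pruning boilerplate there. A small sketch of the request those tests now construct (values mirror the samples in this file):

from google.cloud.compute_v1.types import compute

# "project" is the only key left in request_init for the insert bad-request tests.
request = compute.InsertBackendBucketRequest(project="sample1")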
@@ -3560,8 +3778,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3632,8 +3851,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucketList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3719,8 +3939,9 @@ def test_list_rest_required_fields(request_type=compute.ListBackendBucketsReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucketList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3856,8 +4077,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendBucketList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4007,6 +4229,77 @@ def test_patch_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
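The same pruning block is repeated verbatim for every request type touched in this file. For readability, here is the pattern gathered into one hypothetical helper; prune_unknown_subfields is my name, not something the generator emits, but the behavior is meant to match the inline code above.

def prune_unknown_subfields(request_init, resource_key, request_cls):
    # Drop nested keys in the sample dict that the runtime copy of the
    # dependency does not know about (see gapic-generator-python issue 1748).
    test_field = request_cls.meta.fields[resource_key]

    def get_message_fields(field):
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):  # proto-plus wrapper
                return list(field.message.meta.fields.values())
            return list(field.message.DESCRIPTOR.fields)  # raw *_pb2 message
        return []

    runtime_nested_fields = {
        (field.name, nested.name)
        for field in get_message_fields(test_field)
        for nested in get_message_fields(field)
    }

    for field, value in request_init[resource_key].items():
        # A repeated message field shows up as a list of dicts, a singular one as a dict.
        sample = value[0] if isinstance(value, list) and value else value
        if not isinstance(sample, dict):
            continue
        stale = [s for s in sample if (field, s) not in runtime_nested_fields]
        for subfield in stale:
            if isinstance(value, list):
                for item in request_init[resource_key][field]:
                    del item[subfield]
            else:
                del request_init[resource_key][field][subfield]
    return request_init

# e.g. prune_unknown_subfields(request_init, "backend_bucket_resource", compute.PatchBackendBucketRequest)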
@@ -4040,8 +4333,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4142,8 +4436,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchBackendBucketReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4238,48 +4533,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4321,8 +4574,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4421,6 +4675,77 @@ def test_patch_unary_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4454,8 +4779,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4536,8 +4862,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4632,48 +4959,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4715,8 +5000,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4776,6 +5062,81 @@ def test_set_edge_security_policy_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendBucketRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
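Throughout the diff, pb_return_value disappears and return_value is simply rebound to its underlying protobuf message before serialization. A minimal sketch of that response-mocking pattern in isolation, assuming the requests.Response and google.protobuf.json_format imports these tests already rely on:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="name_value")  # proto-plus wrapper

response_value = Response()
response_value.status_code = 200
# Convert return value to protobuf type, then JSON-encode it as the fake HTTP body.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")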
@@ -4809,8 +5170,9 @@ def test_set_edge_security_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4913,8 +5275,9 @@ def test_set_edge_security_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5010,9 +5373,6 @@ def test_set_edge_security_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5054,8 +5414,9 @@ def test_set_edge_security_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5115,6 +5476,81 @@ def test_set_edge_security_policy_unary_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendBucketRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5148,8 +5584,9 @@ def test_set_edge_security_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5230,8 +5667,9 @@ def test_set_edge_security_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5327,9 +5765,6 @@ def test_set_edge_security_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5371,8 +5806,9 @@ def test_set_edge_security_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5471,6 +5907,77 @@ def test_update_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
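The pruning exists because of version skew (gapic-generator-python issue 1748): a sample dict generated against a newer proto can carry keys that the runtime copy of the dependency rejects. A hedged illustration of that failure mode follows; the field name is invented, and the exact exception raised for an unknown key is a proto-plus implementation detail, so two plausible types are caught.

from google.cloud.compute_v1.types import compute

stale_sample = {"bucket_name": "bucket_name_value", "field_added_in_newer_release": "x"}
try:
    compute.BackendBucket(stale_sample)  # hypothetical key unknown to the installed runtime
except (KeyError, ValueError):
    # Without pruning, building the request from the stale sample dict fails here.
    pass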
@@ -5504,8 +6011,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5606,8 +6114,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateBackendBucketReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5702,48 +6211,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5785,8 +6252,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5885,6 +6353,77 @@ def test_update_unary_rest(request_type): "name": "name_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5918,8 +6457,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6000,8 +6540,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6096,48 +6637,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "compression_mode": "compression_mode_value", - "creation_timestamp": "creation_timestamp_value", - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_cdn": True, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
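One branch of the pruning handles repeated message fields (is_repeated), deleting the unknown key from every element of the list. A small self-contained illustration of that branch with plain dicts, shaped like the negative_caching_policy entries used in this file; nothing here touches the real compute types.

request_init = {
    "backend_bucket_resource": {
        "negative_caching_policy": [{"code": 411, "ttl": 340}, {"code": 404, "ttl": 120}],
    }
}
field, subfield, field_repeated = "negative_caching_policy", "ttl", True

if field_repeated:
    for i in range(0, len(request_init["backend_bucket_resource"][field])):
        del request_init["backend_bucket_resource"][field][i][subfield]

assert request_init["backend_bucket_resource"][field] == [{"code": 411}, {"code": 404}]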
@@ -6179,8 +6678,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_backend_services.py b/tests/unit/gapic/compute_v1/test_backend_services.py index 6fc57288..6757eca0 100644 --- a/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/tests/unit/gapic/compute_v1/test_backend_services.py @@ -591,6 +591,77 @@ def test_add_signed_url_key_rest(request_type): "key_name": "key_name_value", "key_value": "key_value_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddSignedUrlKeyBackendServiceRequest.meta.fields[ + "signed_url_key_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "signed_url_key_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["signed_url_key_resource"][field])): + del 
request_init["signed_url_key_resource"][field][i][subfield] + else: + del request_init["signed_url_key_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -624,8 +695,9 @@ def test_add_signed_url_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -728,8 +800,9 @@ def test_add_signed_url_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -824,10 +897,6 @@ def test_add_signed_url_key_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["signed_url_key_resource"] = { - "key_name": "key_name_value", - "key_value": "key_value_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -867,8 +936,9 @@ def test_add_signed_url_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -927,6 +997,77 @@ def test_add_signed_url_key_unary_rest(request_type): "key_name": "key_name_value", "key_value": "key_value_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddSignedUrlKeyBackendServiceRequest.meta.fields[ + "signed_url_key_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "signed_url_key_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["signed_url_key_resource"][field])): + del request_init["signed_url_key_resource"][field][i][subfield] + else: + del request_init["signed_url_key_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
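For a flat resource such as the signed_url_key_resource sample above, where every value is a plain string, the pruning is effectively a no-op: result stays None, the hasattr(result, "keys") check never fires, and subfields_not_in_runtime stays empty. A short check of that with the same sample values:

request_init = {
    "signed_url_key_resource": {
        "key_name": "key_name_value",
        "key_value": "key_value_value",
    }
}

subfields_not_in_runtime = []
for field, value in request_init["signed_url_key_resource"].items():
    result = None
    if isinstance(value, list) and value:
        result = value[0]
    if isinstance(value, dict):
        result = value
    if result and hasattr(result, "keys"):  # never true for the string-only sample above
        subfields_not_in_runtime.append(field)

assert subfields_not_in_runtime == []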
@@ -960,8 +1101,9 @@ def test_add_signed_url_key_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1042,8 +1184,9 @@ def test_add_signed_url_key_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1138,10 +1281,6 @@ def test_add_signed_url_key_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["signed_url_key_resource"] = { - "key_name": "key_name_value", - "key_value": "key_value_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1181,8 +1320,9 @@ def test_add_signed_url_key_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1253,8 +1393,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1344,8 +1485,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1482,8 +1624,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1638,8 +1781,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1739,8 +1883,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteBackendServiceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1872,8 +2017,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1960,8 +2106,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2041,8 +2188,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2174,8 +2322,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2262,8 +2411,9 @@ def test_delete_signed_url_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 
200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2377,8 +2527,9 @@ def test_delete_signed_url_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2523,8 +2674,9 @@ def test_delete_signed_url_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2612,8 +2764,9 @@ def test_delete_signed_url_key_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2705,8 +2858,9 @@ def test_delete_signed_url_key_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2851,8 +3005,9 @@ def test_delete_signed_url_key_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2943,8 +3098,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -3045,8 +3201,9 @@ def test_get_rest_required_fields(request_type=compute.GetBackendServiceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3180,8 +3337,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3236,6 +3394,81 @@ def test_get_health_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} request_init["resource_group_reference_resource"] = {"group": "group_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.GetHealthBackendServiceRequest.meta.fields[ + "resource_group_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_group_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["resource_group_reference_resource"][field]) + ): + del request_init["resource_group_reference_resource"][field][i][ + subfield + ] + else: + del request_init["resource_group_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
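The field-pruning block injected above is easier to follow in isolation. Below is a minimal, dependency-free sketch of the same idea; the dict contents and the `runtime_nested_fields` set are illustrative stand-ins, not values taken from the generated tests.

# Prune subfields from a sample request dict when the runtime message type
# no longer defines them (simplified; repeated-field handling omitted).
sample_request = {"cache_key_policy": {"include_host": True, "legacy_field": 1}}

# (field, subfield) pairs that the runtime message is assumed to define.
runtime_nested_fields = {("cache_key_policy", "include_host")}

for field, value in sample_request.items():
    if isinstance(value, dict):
        for subfield in list(value):
            if (field, subfield) not in runtime_nested_fields:
                del value[subfield]

print(sample_request)  # {'cache_key_policy': {'include_host': True}}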
@@ -3248,8 +3481,9 @@ def test_get_health_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3329,8 +3563,9 @@ def test_get_health_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3427,7 +3662,6 @@ def test_get_health_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["resource_group_reference_resource"] = {"group": "group_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3469,8 +3703,9 @@ def test_get_health_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3541,8 +3776,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3625,8 +3861,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3758,8 +3995,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -3968,6 +4206,77 @@ def test_insert_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
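The recurring `return_value = compute.Operation.pb(return_value)` change exists because `json_format.MessageToJson` expects a raw protobuf message rather than a proto-plus wrapper. A minimal sketch of that conversion, assuming `google-cloud-compute` and `protobuf` are installed (the field value is illustrative):

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Build a proto-plus wrapper, unwrap it to the underlying protobuf message,
# and serialize it to JSON the same way the mocked responses in these tests do.
op = compute.Operation(name="operation-1")
op_pb = compute.Operation.pb(op)            # proto-plus -> protobuf
payload = json_format.MessageToJson(op_pb)  # JSON string for the fake body
print(payload)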
@@ -4001,8 +4310,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4099,8 +4409,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertBackendServiceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4194,161 +4505,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - 
"custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4389,8 +4545,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4601,6 +4758,77 @@ def test_insert_unary_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
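Every happy-path test above fakes the HTTP layer by hand-building a `requests.Response`. A compact sketch of that mock setup, with the response body and URL as illustrative placeholders:

from unittest import mock

from requests import Response
from requests.sessions import Session

# Fabricate a 200 response whose body is the JSON payload the client expects,
# then patch Session.request so the test never reaches the network.
response_value = Response()
response_value.status_code = 200
response_value._content = b'{"name": "operation-1"}'

with mock.patch.object(Session, "request", return_value=response_value):
    resp = Session().request("POST", "https://example.invalid")
    assert resp.json() == {"name": "operation-1"}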
@@ -4634,8 +4862,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4712,8 +4941,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4807,161 +5037,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - 
"custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5002,8 +5077,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5074,8 +5150,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5161,8 +5238,9 @@ def test_list_rest_required_fields(request_type=compute.ListBackendServicesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5298,8 +5376,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5562,6 +5641,77 @@ def test_patch_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
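The `get_message_fields` helper added above distinguishes proto-plus wrappers from raw protobuf types by the absence of a `DESCRIPTOR` attribute. That check can be mirrored directly on one of the request fields used in these tests, assuming `google-cloud-compute` is installed:

from google.cloud.compute_v1.types import compute

# A composite field whose message type lacks DESCRIPTOR is a proto-plus
# wrapper (schema under .meta.fields); otherwise it is a raw protobuf type
# (schema under .DESCRIPTOR.fields).
field = compute.PatchBackendServiceRequest.meta.fields["backend_service_resource"]
is_proto_plus = not hasattr(field.message, "DESCRIPTOR")
nested_names = (
    [f.name for f in field.message.meta.fields.values()]
    if is_proto_plus
    else [f.name for f in field.message.DESCRIPTOR.fields]
)
print(is_proto_plus, nested_names[:3])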
@@ -5595,8 +5745,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5697,8 +5848,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5791,6 +5943,106 @@ def test_patch_rest_bad_request( transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchBackendServiceRequest(), + project="project_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + + +def test_patch_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchBackendServiceRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} request_init["backend_service_resource"] = { @@ -5948,261 +6200,78 @@ def test_patch_rest_bad_request( "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } - request = request_type(**request_init) + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.patch(request) + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_patch_rest_flattened(): - client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation() - - # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "backend_service": "sample2"} - - # get truthy value for each flattened field - mock_args = dict( - project="project_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.patch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" - % client.transport._host, - args[1], - ) - - -def test_patch_rest_flattened_error(transport: str = "rest"): - client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.patch( - compute.PatchBackendServiceRequest(), - project="project_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), - ) - - -def test_patch_rest_error(): - client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - compute.PatchBackendServiceRequest, - dict, - ], -) -def test_patch_unary_rest(request_type): - client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - 
"signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } - request = request_type(**request_init) + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + 
for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] + request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -6235,8 +6304,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6317,8 +6387,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6413,161 +6484,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": 
True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": 
"self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6609,8 +6525,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6670,6 +6587,81 @@ def test_set_edge_security_policy_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") 
+ if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -6703,8 +6695,9 @@ def test_set_edge_security_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6807,8 +6800,9 @@ def test_set_edge_security_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6904,9 +6898,6 @@ def test_set_edge_security_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6948,8 +6939,9 @@ def test_set_edge_security_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7009,6 +7001,81 @@ def test_set_edge_security_policy_unary_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
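The recurring `pb_return_value` to `return_value` change in these hunks converts the proto-plus return object to its underlying protobuf message before serializing it into the mocked HTTP body. A minimal sketch of that pattern, assuming google-cloud-compute, protobuf, and requests are installed (the Operation value here is illustrative):

    from google.cloud import compute_v1 as compute
    from google.protobuf import json_format
    from requests import Response

    return_value = compute.Operation(name="operation-1")  # illustrative value

    # Convert the proto-plus wrapper to its underlying protobuf message,
    # then serialize it to JSON, as the regenerated hunks do.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)

    # Fake the HTTP response the REST transport would otherwise receive.
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")

Reusing the single `return_value` name (instead of the old `pb_return_value`) reflects that the value only changes representation, which is what the new "Convert return value to protobuf type" comment describes.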
@@ -7042,8 +7109,9 @@ def test_set_edge_security_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7124,8 +7192,9 @@ def test_set_edge_security_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7221,9 +7290,6 @@ def test_set_edge_security_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7265,8 +7331,9 @@ def test_set_edge_security_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7400,6 +7467,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyBackendServiceRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
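The block added to test_set_iam_policy_rest prunes keys from the sample request body that the installed version of the library no longer defines. The check that decides between proto-plus and raw protobuf introspection is whether the field's message type exposes a DESCRIPTOR attribute. A condensed sketch of that introspection, assuming google-cloud-compute is installed (all names are taken from the hunk above):

    from google.cloud import compute_v1 as compute

    test_field = compute.SetIamPolicyBackendServiceRequest.meta.fields[
        "global_set_policy_request_resource"
    ]

    def get_message_fields(field):
        # Message-typed fields expose their sub-fields either through the
        # proto-plus metadata (no DESCRIPTOR attribute) or through the raw
        # protobuf descriptor; scalar fields yield an empty list.
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):
                return list(field.message.meta.fields.values())
            return list(field.message.DESCRIPTOR.fields)
        return []

    # (field, nested field) name pairs known to the installed runtime; any
    # key in the sample dict missing from this set is deleted before the
    # request object is constructed.
    runtime_nested_fields = [
        (field.name, nested.name)
        for field in get_message_fields(test_field)
        for nested in get_message_fields(field)
    ]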
@@ -7414,8 +7556,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7497,8 +7640,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7593,83 +7737,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7711,8 +7778,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7772,6 +7840,81 @@ def test_set_security_policy_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the 
http request call within the method and fake a response. @@ -7805,8 +7948,9 @@ def test_set_security_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7909,8 +8053,9 @@ def test_set_security_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8005,9 +8150,6 @@ def test_set_security_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8049,8 +8191,9 @@ def test_set_security_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8110,6 +8253,81 @@ def test_set_security_policy_unary_rest(request_type): request_init["security_policy_reference_resource"] = { "security_policy": "security_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -8143,8 +8361,9 @@ def test_set_security_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8225,8 +8444,9 @@ def test_set_security_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8321,9 +8541,6 @@ def test_set_security_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8365,8 +8582,9 @@ def test_set_security_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8578,6 +8796,77 @@ def test_update_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
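For test_update_rest the pruned body is `backend_service_resource`, whose `backends` entry is a repeated message, so the deletion step has to visit every element of the list before removing a stale subfield. The index-based loop in the hunk can be read as the following simplified equivalent (a sketch assuming `request_init` and `subfields_not_in_runtime` were built as above; `dict.pop` with a default is used for brevity):

    for subfield_to_delete in subfields_not_in_runtime:
        field = subfield_to_delete["field"]
        subfield = subfield_to_delete["subfield"]
        if not subfield:
            continue
        target = request_init["backend_service_resource"][field]
        if subfield_to_delete["is_repeated"]:
            # e.g. drop the stale key from every dict in "backends"
            for element in target:
                element.pop(subfield, None)
        else:
            target.pop(subfield, None)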
@@ -8611,8 +8900,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8713,8 +9003,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateBackendServiceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8724,246 +9015,91 @@ def test_update_rest_required_fields(request_type=compute.UpdateBackendServiceRe expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params - - -def test_update_rest_unset_required_fields(): - transport = transports.BackendServicesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("requestId",)) - & set( - ( - "backendService", - "backendServiceResource", - "project", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_rest_interceptors(null_interceptor): - transport = transports.BackendServicesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.BackendServicesRestInterceptor(), - ) - client = BackendServicesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.BackendServicesRestInterceptor, "post_update" - ) as post, mock.patch.object( - transports.BackendServicesRestInterceptor, "pre_update" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.UpdateBackendServiceRequest.pb( - compute.UpdateBackendServiceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.UpdateBackendServiceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.update( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateBackendServiceRequest -): - client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} - 
request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - 
"sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", + + +def test_update_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "backendServiceResource", + "project", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_update" + ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "pre_update" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateBackendServiceRequest.pb( + compute.UpdateBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9005,8 +9141,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9218,6 +9355,77 @@ def test_update_unary_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9251,8 +9459,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9333,8 +9542,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9429,161 +9639,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - 
"custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9625,8 +9680,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_disk_types.py b/tests/unit/gapic/compute_v1/test_disk_types.py index d722c407..6abe1f95 100644 --- a/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/tests/unit/gapic/compute_v1/test_disk_types.py @@ -571,8 +571,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -662,8 +663,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -798,8 +800,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -937,8 +940,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1028,8 +1032,9 @@ def test_get_rest_required_fields(request_type=compute.GetDiskTypeRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1163,8 +1168,9 @@ def test_get_rest_flattened(): # Wrap the 
value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1234,8 +1240,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1325,8 +1332,9 @@ def test_list_rest_required_fields(request_type=compute.ListDiskTypesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1462,8 +1470,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_disks.py b/tests/unit/gapic/compute_v1/test_disks.py index 0e48e46f..a0a1d095 100644 --- a/tests/unit/gapic/compute_v1/test_disks.py +++ b/tests/unit/gapic/compute_v1/test_disks.py @@ -556,6 +556,88 @@ def test_add_resource_policies_rest(request_type): request_init["disks_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesDiskRequest.meta.fields[ + "disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_add_resource_policies_request_resource"][ + field + ] + ), + ): + del request_init["disks_add_resource_policies_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_add_resource_policies_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
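The reason all of these pruning blocks exist (see the linked gapic-generator-python issue 1748) is that constructing the request from the sample dict fails when the dict carries a key the installed proto definitions do not know about. A hedged sketch with the disks request from the hunk above; the commented-out key is hypothetical and only illustrates the failure the pruning avoids:

    from google.cloud import compute_v1 as compute

    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "disk": "sample3",
        "disks_add_resource_policies_request_resource": {
            "resource_policies": [
                "resource_policies_value1",
                "resource_policies_value2",
            ],
            # "field_added_in_a_newer_release": "...",  # hypothetical key; an
            # older runtime would raise an error on this unknown field
        },
    }

    # With only keys the runtime knows about, construction succeeds on any
    # supported version of the dependency.
    request = compute.AddResourcePoliciesDiskRequest(**request_init)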
@@ -589,8 +671,9 @@ def test_add_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -697,8 +780,9 @@ def test_add_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -792,9 +876,6 @@ def test_add_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -837,8 +918,9 @@ def test_add_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -899,6 +981,88 @@ def test_add_resource_policies_unary_rest(request_type): request_init["disks_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesDiskRequest.meta.fields[ + "disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_add_resource_policies_request_resource"][ + field + ] + ), + ): + del request_init["disks_add_resource_policies_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_add_resource_policies_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -932,8 +1096,9 @@ def test_add_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1018,8 +1183,9 @@ def test_add_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1113,9 +1279,6 @@ def test_add_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1158,8 +1321,9 @@ def test_add_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1233,8 +1397,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1324,8 +1489,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1460,8 +1626,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.DiskAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1581,6 +1748,81 @@ def test_bulk_insert_rest(request_type): request_init["bulk_insert_disk_resource_resource"] = { "source_consistency_group_policy": "source_consistency_group_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertDiskRequest.meta.fields[ + "bulk_insert_disk_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_disk_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["bulk_insert_disk_resource_resource"][field]) + ): + del request_init["bulk_insert_disk_resource_resource"][field][i][ + subfield + ] + else: + del request_init["bulk_insert_disk_resource_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
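Throughout the diff, the mocked REST responses stop introducing a separate pb_return_value and simply reuse return_value once it has been converted to its protobuf form; the proto-plus object is not needed again after conversion. A minimal, self-contained sketch of that serialization path (the Operation field value is illustrative, and the compute / Response / json_format names are assumed to match the imports the test module already uses):

from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute

return_value = compute.Operation(name="operation-1234")

response_value = Response()
response_value.status_code = 200
# proto-plus message -> underlying protobuf -> JSON string -> response bytes
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")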
@@ -1614,8 +1856,9 @@ def test_bulk_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1716,8 +1959,9 @@ def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertDiskReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1808,9 +2052,6 @@ def test_bulk_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1852,8 +2093,9 @@ def test_bulk_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1913,6 +2155,81 @@ def test_bulk_insert_unary_rest(request_type): request_init["bulk_insert_disk_resource_resource"] = { "source_consistency_group_policy": "source_consistency_group_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertDiskRequest.meta.fields[ + "bulk_insert_disk_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_disk_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["bulk_insert_disk_resource_resource"][field]) + ): + del request_init["bulk_insert_disk_resource_resource"][field][i][ + subfield + ] + else: + del request_init["bulk_insert_disk_resource_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
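For a flat body like bulk_insert_disk_resource_resource, the pruning pass above has nothing to do: no value is a dict or a list of dicts, so subfields_not_in_runtime stays empty and request_init passes through unchanged. A tiny illustration of that no-op case (variable names hypothetical):

payload = {"source_consistency_group_policy": "source_consistency_group_policy_value"}
nested_values = [
    value
    for value in payload.values()
    if isinstance(value, dict) or (isinstance(value, list) and value and isinstance(value[0], dict))
]
assert nested_values == []  # nothing to prune for a payload of scalars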
@@ -1946,8 +2263,9 @@ def test_bulk_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2028,8 +2346,9 @@ def test_bulk_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2120,9 +2439,6 @@ def test_bulk_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2164,8 +2480,9 @@ def test_bulk_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2259,6 +2576,73 @@ def test_create_snapshot_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateSnapshotDiskRequest.meta.fields["snapshot_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
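After pruning, request_type(**request_init) relies on proto-plus coercing the remaining plain dict into the proper message type. An assumed mini-example for the snapshot case, reusing field values from the sample above (compute refers to the same types module the tests import):

request = compute.CreateSnapshotDiskRequest(
    project="sample1",
    zone="sample2",
    disk="sample3",
    snapshot_resource={"name": "name_value", "description": "description_value"},
)
assert isinstance(request.snapshot_resource, compute.Snapshot)
assert request.snapshot_resource.name == "name_value"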
@@ -2292,8 +2676,9 @@ def test_create_snapshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2405,8 +2790,9 @@ def test_create_snapshot_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2505,43 +2891,6 @@ def test_create_snapshot_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
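The bad-request tests above drop the snapshot_resource body entirely and keep only the routing fields needed to satisfy transcoding, since the mocked call fails before the body would matter. A sketch of why the shorter request is still well-formed, assuming proto-plus's usual defaulting of unset message fields:

request = compute.CreateSnapshotDiskRequest(project="sample1", zone="sample2", disk="sample3")
assert request.snapshot_resource == compute.Snapshot()  # unset body falls back to an empty message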
@@ -2582,8 +2931,9 @@ def test_create_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2676,6 +3026,73 @@ def test_create_snapshot_unary_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateSnapshotDiskRequest.meta.fields["snapshot_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2709,8 +3126,9 @@ def test_create_snapshot_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2800,8 +3218,9 @@ def test_create_snapshot_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2900,43 +3319,6 @@ def test_create_snapshot_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2977,8 +3359,9 @@ def test_create_snapshot_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3067,8 +3450,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3172,8 +3556,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3303,8 +3688,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3392,8 +3778,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3475,8 +3862,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3606,8 +3994,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3708,8 +4097,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3830,8 +4220,9 @@ def test_get_rest_required_fields(request_type=compute.GetDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3961,8 +4352,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4031,8 +4423,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4119,8 +4512,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4256,8 +4650,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4372,6 +4767,73 @@ def test_insert_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during 
generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
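The disk_resource sample is the first body in this file with a repeated message field (guest_os_features), which is what the is_repeated branch above handles. A hypothetical walk-through of that branch, assuming the runtime compute.GuestOsFeature had dropped its type_ field:

request_init = {"disk_resource": {"guest_os_features": [{"type_": "type__value"}]}}
stale = [{"field": "guest_os_features", "subfield": "type_", "is_repeated": True}]
for entry in stale:
    field, subfield = entry["field"], entry["subfield"]
    if entry["is_repeated"]:
        # strip the stale key from every element of the repeated field
        for item in request_init["disk_resource"][field]:
            item.pop(subfield, None)
assert request_init == {"disk_resource": {"guest_os_features": [{}]}}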
@@ -4405,8 +4867,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4512,8 +4975,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4609,66 +5073,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4708,8 +5112,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4824,6 +5229,73 @@ def test_insert_unary_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4857,8 +5329,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4942,8 +5415,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5039,66 +5513,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - } request 
= request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5138,8 +5552,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5209,8 +5624,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5300,8 +5716,9 @@ def test_list_rest_required_fields(request_type=compute.ListDisksRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5437,8 +5854,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5550,6 +5968,88 @@ def test_remove_resource_policies_rest(request_type): request_init["disks_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesDiskRequest.meta.fields[ + "disks_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_remove_resource_policies_request_resource"][ + field + ] + ), + ): + del request_init["disks_remove_resource_policies_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_remove_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5583,8 +6083,9 @@ def test_remove_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5691,8 +6192,9 @@ def test_remove_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5786,9 +6288,6 @@ def test_remove_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5831,8 +6330,9 @@ def test_remove_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5893,6 +6393,88 @@ def test_remove_resource_policies_unary_rest(request_type): request_init["disks_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesDiskRequest.meta.fields[ + "disks_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_remove_resource_policies_request_resource"][ + field + ] + ), + ): + del request_init["disks_remove_resource_policies_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_remove_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5926,8 +6508,9 @@ def test_remove_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6012,8 +6595,9 @@ def test_remove_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6107,9 +6691,6 @@ def test_remove_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6152,8 +6733,9 @@ def test_remove_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6212,6 +6794,79 @@ def test_resize_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["disks_resize_request_resource"] = {"size_gb": 739} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeDiskRequest.meta.fields["disks_resize_request_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disks_resize_request_resource"][field]) + ): + del request_init["disks_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["disks_resize_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
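Aside from the pruning preamble, the other change repeated in every hunk of this file is the small "Convert return value to protobuf type" rewrite: the expected proto-plus value is converted with the public `.pb()` helper and the converted message is what gets serialized into the mocked HTTP response; only the intermediate variable name changes (`pb_return_value` is dropped and `return_value` is reused). In isolation the mock-response setup looks roughly like this (a standalone sketch, not a line-for-line copy of the generated tests):

    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute
    from requests import Response

    return_value = compute.Operation()  # any expected Operation would do here

    # proto-plus wrappers are not protobuf messages themselves; `.pb()` exposes
    # the underlying protobuf message that json_format can serialize.
    json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))

    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")  # what the client will parse

Functionally the old and new forms are identical; the rename just keeps a single `return_value` name flowing through the assertion that follows each mocked call.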
@@ -6245,8 +6900,9 @@ def test_resize_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6351,8 +7007,9 @@ def test_resize_rest_required_fields(request_type=compute.ResizeDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6444,7 +7101,6 @@ def test_resize_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_resize_request_resource"] = {"size_gb": 739} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6485,8 +7141,9 @@ def test_resize_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6543,6 +7200,79 @@ def test_resize_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["disks_resize_request_resource"] = {"size_gb": 739} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeDiskRequest.meta.fields["disks_resize_request_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disks_resize_request_resource"][field]) + ): + del request_init["disks_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["disks_resize_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6576,8 +7306,9 @@ def test_resize_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6660,8 +7391,9 @@ def test_resize_unary_rest_required_fields(request_type=compute.ResizeDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6753,7 +7485,6 @@ def test_resize_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_resize_request_resource"] = {"size_gb": 739} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6794,8 +7525,9 @@ def test_resize_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6928,6 +7660,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyDiskRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6942,8 +7749,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7029,8 +7837,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7124,83 +7933,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
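The third recurring change is visible at the end of the hunk above: every `*_rest_bad_request` test drops its handwritten request-body dict (here the large `zone_set_policy_request_resource` payload) and keeps only the path parameters. The body is never inspected in these tests, because the mocked transport call fails before anything is sent, and keeping a verbatim body in sync with the runtime types is exactly the churn the new pruning preamble exists to avoid. A minimal sketch of why the slimmer setup still works, using only names visible in this diff:

    from google.cloud.compute_v1.types import compute

    # Path parameters alone are enough to build the request and satisfy URL
    # transcoding; the policy body simply stays at its default (empty) value.
    request = compute.SetIamPolicyDiskRequest(
        project="sample1", zone="sample2", resource="sample3"
    )
    assert request.project == "sample1"
    assert not request.zone_set_policy_request_resource.bindings  # default, empty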
@@ -7247,8 +7979,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7310,6 +8043,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsDiskRequest.meta.fields[ + "zone_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_labels_request_resource"][field]) + ): + del request_init["zone_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7343,8 +8151,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7449,8 +8258,9 @@ def test_set_labels_rest_required_fields(request_type=compute.SetLabelsDiskReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7542,10 +8352,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7592,8 +8398,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7655,6 +8462,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsDiskRequest.meta.fields[ + "zone_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_labels_request_resource"][field]) + ): + del request_init["zone_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7688,8 +8570,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7774,8 +8657,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7867,10 +8751,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7917,8 +8797,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7979,6 +8860,88 @@ def test_start_async_replication_rest(request_type): request_init["disks_start_async_replication_request_resource"] = { "async_secondary_disk": "async_secondary_disk_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartAsyncReplicationDiskRequest.meta.fields[ + "disks_start_async_replication_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_start_async_replication_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_start_async_replication_request_resource"][ + field + ] + ), + ): + del request_init["disks_start_async_replication_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_start_async_replication_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -8012,8 +8975,9 @@ def test_start_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8120,8 +9084,9 @@ def test_start_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8215,9 +9180,6 @@ def test_start_async_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_start_async_replication_request_resource"] = { - "async_secondary_disk": "async_secondary_disk_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8260,8 +9222,9 @@ def test_start_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8322,6 +9285,88 @@ def test_start_async_replication_unary_rest(request_type): request_init["disks_start_async_replication_request_resource"] = { "async_secondary_disk": "async_secondary_disk_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartAsyncReplicationDiskRequest.meta.fields[ + "disks_start_async_replication_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_start_async_replication_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["disks_start_async_replication_request_resource"][ + field + ] + ), + ): + del request_init["disks_start_async_replication_request_resource"][ + field + ][i][subfield] + else: + del request_init["disks_start_async_replication_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -8355,8 +9400,9 @@ def test_start_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8441,8 +9487,9 @@ def test_start_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8536,9 +9583,6 @@ def test_start_async_replication_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_start_async_replication_request_resource"] = { - "async_secondary_disk": "async_secondary_disk_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8581,8 +9625,9 @@ def test_start_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8673,8 +9718,9 @@ def test_stop_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8780,8 +9826,9 @@ def test_stop_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8913,8 +9960,9 @@ def test_stop_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9002,8 +10050,9 @@ def test_stop_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9087,8 +10136,9 @@ def test_stop_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9220,8 +10270,9 @@ def test_stop_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9279,6 +10330,88 @@ def test_stop_group_async_replication_rest(request_type): request_init["disks_stop_group_async_replication_resource_resource"] = { "resource_policy": "resource_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StopGroupAsyncReplicationDiskRequest.meta.fields[ + "disks_stop_group_async_replication_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_stop_group_async_replication_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field] + ), + ): + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][i][subfield] + else: + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9312,8 +10445,9 @@ def test_stop_group_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9416,8 +10550,9 @@ def test_stop_group_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9510,9 +10645,6 @@ def test_stop_group_async_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["disks_stop_group_async_replication_resource_resource"] = { - "resource_policy": "resource_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9554,8 +10686,9 @@ def test_stop_group_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9615,6 +10748,88 @@ def test_stop_group_async_replication_unary_rest(request_type): request_init["disks_stop_group_async_replication_resource_resource"] = { "resource_policy": "resource_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StopGroupAsyncReplicationDiskRequest.meta.fields[ + "disks_stop_group_async_replication_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_stop_group_async_replication_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field] + ), + ): + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][i][subfield] + else: + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9648,8 +10863,9 @@ def test_stop_group_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9730,8 +10946,9 @@ def test_stop_group_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9824,9 +11041,6 @@ def test_stop_group_async_replication_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["disks_stop_group_async_replication_resource_resource"] = { - "resource_policy": "resource_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9868,8 +11082,9 @@ def test_stop_group_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9931,6 +11146,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsDiskRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9943,8 +11233,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10028,8 +11319,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10125,9 +11417,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10174,8 +11463,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10293,6 +11583,73 @@ def test_update_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
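
The deletion loop above (emitted once per sample resource throughout these test files) reduces to a simple idea: for every top-level key of the hand-written sample dict, drop any nested key that the installed version of the library does not know about, treating list-valued (repeated) fields element by element. A dependency-free sketch of that idea; `known_pairs` plays the role of `runtime_nested_fields`, and the field names in the example below are made up:

    def prune_unknown_subfields(sample, known_pairs):
        # sample: a dict like request_init["disk_resource"]
        # known_pairs: set of (field, subfield) tuples supported by the runtime library
        for field, value in list(sample.items()):
            if isinstance(value, list):
                elements = [v for v in value if isinstance(v, dict)]  # repeated message field
            elif isinstance(value, dict):
                elements = [value]                                    # singular message field
            else:
                continue                                              # scalar: nothing nested
            for element in elements:
                for subfield in list(element):
                    if (field, subfield) not in known_pairs:
                        del element[subfield]
        return sample

    sample = {"guest_os_features": [{"type_": "x", "not_yet_released": True}], "name": "disk-1"}
    print(prune_unknown_subfields(sample, {("guest_os_features", "type_")}))
    # {'guest_os_features': [{'type_': 'x'}], 'name': 'disk-1'}
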
@@ -10326,8 +11683,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10438,8 +11796,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10537,66 +11896,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - 
} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10637,8 +11936,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10754,6 +12054,73 @@ def test_update_unary_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
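
The other change repeated in every hunk here is cosmetic: the `pb_return_value` temporary is gone and `return_value` is simply rebound to its protobuf form before serialization, with an explanatory comment added. The underlying pattern is unchanged and worth spelling out once: a proto-plus message has to be converted with `.pb()` before `json_format.MessageToJson` can serialize it into the body of the mocked HTTP response. A minimal sketch of that round-trip (the `Operation` field value is arbitrary):

    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format
    from requests import Response

    return_value = compute.Operation(name="operation-1")

    # Wrap the value into a proper Response obj, as the tests do
    response_value = Response()
    response_value.status_code = 200
    # Convert return value to protobuf type, then to JSON, then to bytes
    pb = compute.Operation.pb(return_value)
    response_value._content = json_format.MessageToJson(pb).encode("UTF-8")

    print(response_value.json()["name"])  # -> operation-1
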
@@ -10787,8 +12154,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10877,8 +12245,9 @@ def test_update_unary_rest_required_fields(request_type=compute.UpdateDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10976,66 +12345,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": 
"zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11076,8 +12385,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py index 280bbb21..42d1af10 100644 --- a/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py @@ -634,8 +634,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -737,8 +738,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -870,8 +872,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -958,8 +961,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1039,8 +1043,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1172,8 +1177,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1246,8 +1252,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExternalVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1331,8 +1338,9 @@ def test_get_rest_required_fields(request_type=compute.GetExternalVpnGatewayRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExternalVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1466,8 +1474,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExternalVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1533,6 +1542,81 @@ def test_insert_rest(request_type): "redundancy_type": "redundancy_type_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertExternalVpnGatewayRequest.meta.fields[ + "external_vpn_gateway_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "external_vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["external_vpn_gateway_resource"][field]) + ): + del request_init["external_vpn_gateway_resource"][field][i][ + subfield + ] + else: + del request_init["external_vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1566,8 +1650,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1666,8 +1751,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1761,18 +1847,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["external_vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "interfaces": [{"id": 205, "ip_address": "ip_address_value"}], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "redundancy_type": "redundancy_type_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1813,8 +1887,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1882,6 +1957,81 @@ def test_insert_unary_rest(request_type): "redundancy_type": "redundancy_type_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertExternalVpnGatewayRequest.meta.fields[ + "external_vpn_gateway_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "external_vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["external_vpn_gateway_resource"][field]) + ): + del request_init["external_vpn_gateway_resource"][field][i][ + subfield + ] + else: + del request_init["external_vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
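
The reason this pruning matters is the line that follows it in every test, `request = request_type(**request_init)`: the proto machinery rejects keys it does not recognize, so a sample dict generated against a newer compute schema than the one installed at test time would fail before the transport is ever exercised (the motivation behind the linked gapic-generator-python issue #1748). A small illustration with a deliberately bogus key; the exact exception type can vary by proto-plus/protobuf version, hence the broad except:

    from google.cloud.compute_v1.types import compute

    request_init = {"project": "sample1"}
    request_init["external_vpn_gateway_resource"] = {
        "name": "name_value",
        "field_from_a_newer_api": 1,  # made-up key the installed library does not know
    }

    try:
        compute.InsertExternalVpnGatewayRequest(**request_init)
    except (ValueError, TypeError, KeyError) as exc:
        print(f"rejected: {exc}")
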
@@ -1915,8 +2065,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1993,8 +2144,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2088,18 +2240,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["external_vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "interfaces": [{"id": 205, "ip_address": "ip_address_value"}], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "redundancy_type": "redundancy_type_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2140,8 +2280,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2213,8 +2354,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExternalVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2301,8 +2443,9 @@ def test_list_rest_required_fields(request_type=compute.ListExternalVpnGatewaysR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExternalVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2438,8 +2581,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.ExternalVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExternalVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2551,6 +2695,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsExternalVpnGatewayRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2584,8 +2803,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2686,8 +2906,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2782,10 +3003,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2827,8 +3044,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2889,6 +3107,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsExternalVpnGatewayRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2922,8 +3215,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3002,8 +3296,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3098,10 +3393,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3143,8 +3434,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3204,6 +3496,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsExternalVpnGatewayRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
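
A second effect of version skew shows up in the `*_rest_bad_request` tests: since they only check that a non-2xx response surfaces as an exception, the request body is irrelevant, so the hard-coded resource dicts are simply removed and only the routing fields (project, resource, ...) are kept. The shape of such a test, reconstructed from the surrounding hunks rather than copied verbatim, with the expected exception hedged to the generic `GoogleAPICallError`:

    from unittest import mock

    import pytest
    from google.api_core import exceptions as core_exceptions
    from google.auth import credentials as ga_credentials
    from requests import Response
    from requests.sessions import Session

    from google.cloud import compute_v1

    def test_test_iam_permissions_rest_bad_request_sketch():
        client = compute_v1.ExternalVpnGatewaysClient(
            credentials=ga_credentials.AnonymousCredentials(), transport="rest"
        )
        request_init = {"project": "sample1", "resource": "sample2"}
        request = compute_v1.TestIamPermissionsExternalVpnGatewayRequest(**request_init)

        # Mock the http request call within the method and fake a BadRequest error.
        response_value = Response()
        response_value.status_code = 400
        response_value.request = mock.Mock()
        with mock.patch.object(Session, "request", return_value=response_value):
            with pytest.raises(core_exceptions.GoogleAPICallError):
                client.test_iam_permissions(request=request)
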
@@ -3216,8 +3583,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3297,8 +3665,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3396,9 +3765,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3440,8 +3806,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_firewall_policies.py b/tests/unit/gapic/compute_v1/test_firewall_policies.py index 24a9c01f..00703630 100644 --- a/tests/unit/gapic/compute_v1/test_firewall_policies.py +++ b/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -600,6 +600,83 @@ def test_add_association_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -633,8 +710,9 @@ def test_add_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -738,8 +816,9 @@ def test_add_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,13 +917,6 @@ def test_add_association_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -885,8 +957,9 @@ def test_add_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -949,6 +1022,83 @@ def test_add_association_unary_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -982,8 +1132,9 @@ def test_add_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1065,8 +1216,9 @@ def test_add_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1165,13 +1317,6 @@ def test_add_association_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1212,8 +1357,9 @@ def test_add_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1320,6 +1466,81 @@ def test_add_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
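The `get_message_fields` helper that recurs in each of these tests tells proto-plus wrappers apart from raw protobuf messages purely by duck typing: a `*_pb2` message class exposes `DESCRIPTOR.fields`, while a proto-plus wrapper exposes `meta.fields` instead. A minimal stand-alone sketch of that check, using stand-in classes rather than real compute types:

```python
# Stand-in classes only; real compute types are not needed to show the check.
class _ProtoPlusMeta:
    fields = {"name": "<field name>", "priority": "<field priority>"}

class ProtoPlusStandIn:              # proto-plus style: has .meta.fields, no DESCRIPTOR
    meta = _ProtoPlusMeta()

class _Descriptor:
    fields = ["name", "priority"]

class Pb2StandIn:                    # *_pb2 style: has DESCRIPTOR.fields
    DESCRIPTOR = _Descriptor()

def message_fields_of(message_cls):
    # Duck-type the same way the generated helper does.
    if not hasattr(message_cls, "DESCRIPTOR"):
        return list(message_cls.meta.fields.values())
    return list(message_cls.DESCRIPTOR.fields)

print(message_fields_of(ProtoPlusStandIn))  # ['<field name>', '<field priority>']
print(message_fields_of(Pb2StandIn))        # ['name', 'priority']
```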
@@ -1353,8 +1574,9 @@ def test_add_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1453,8 +1675,9 @@ def test_add_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1548,57 +1771,6 @@ def test_add_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1639,8 +1811,9 @@ def test_add_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1747,6 +1920,81 @@ def test_add_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
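The other change that recurs through every hunk is a rename only: the intermediate `pb_return_value` binding is dropped and `return_value` is rebound to the underlying protobuf message before JSON serialization, with a comment added. Outside the generated tests, the same response-mocking pattern looks roughly like this (a sketch assuming `google-cloud-compute` and `requests` are installed; behaviour is unchanged from the `pb_return_value` spelling):

```python
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

# Build a proto-plus return value, unwrap it to the underlying protobuf message,
# and serialize that to JSON so it can stand in for the REST response body.
return_value = compute.Operation(name="operation-123")
return_value = compute.Operation.pb(return_value)            # proto-plus -> protobuf
json_return_value = json_format.MessageToJson(return_value)

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
```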
@@ -1780,8 +2028,9 @@ def test_add_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1858,8 +2107,9 @@ def test_add_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1953,57 +2203,6 @@ def test_add_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2044,8 +2243,9 @@ def test_add_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2134,8 +2334,9 @@ def test_clone_rules_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2238,8 +2439,9 @@ def test_clone_rules_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2370,8 +2572,9 @@ def test_clone_rules_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2457,8 +2660,9 @@ def test_clone_rules_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2539,8 +2743,9 @@ def test_clone_rules_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2671,8 +2876,9 @@ def test_clone_rules_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2758,8 +2964,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2855,8 +3062,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteFirewallPolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2979,8 +3187,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3066,8 +3275,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3143,8 +3353,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3267,8 +3478,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3345,8 +3557,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3431,8 +3644,9 @@ def test_get_rest_required_fields(request_type=compute.GetFirewallPolicyRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3557,8 +3771,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3627,8 +3842,9 @@ def test_get_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3709,8 +3925,9 @@ def test_get_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3835,8 +4052,9 @@ def test_get_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3903,8 +4121,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -3983,8 +4202,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4109,8 +4329,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4185,8 +4406,9 @@ def test_get_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4273,8 +4495,9 @@ def test_get_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4399,8 +4622,9 @@ def test_get_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4539,6 +4763,77 @@ def test_insert_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
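`runtime_nested_fields` flattens exactly one level of nesting: for every message-typed field of the resource message it records the (parent, child) name pairs that exist at runtime, and anything nested deeper is ignored. A toy rendition of the same comprehension over plain stand-in objects (names hypothetical):

```python
# Stand-in objects: a field optionally points at a nested message with more fields.
class Msg:
    def __init__(self, fields):
        self.fields = fields

class Field:
    def __init__(self, name, message=None):
        self.name = name
        self.message = message

def fields_of(field):
    # Return the nested message's fields, or an empty list for scalar fields.
    return field.message.fields if field.message else []

match_msg = Msg([Field("src_ip_ranges"), Field("layer4_configs", Msg([Field("ports")]))])
resource = Field(
    "firewall_policy_rule_resource",
    Msg([Field("action"), Field("match", match_msg)]),
)

runtime_nested_fields = [
    (field.name, nested.name)
    for field in fields_of(resource)
    for nested in fields_of(field)
]
print(runtime_nested_fields)
# [('match', 'src_ip_ranges'), ('match', 'layer4_configs')]  -- 'ports' is one level too deep to be recorded
```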
@@ -4572,8 +4867,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4678,8 +4974,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertFirewallPolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4783,92 +5080,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the 
method and fake a BadRequest error. @@ -4913,8 +5124,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5059,6 +5271,77 @@ def test_insert_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5092,8 +5375,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5178,8 +5462,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5283,92 +5568,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. @@ -5413,8 +5612,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5487,8 +5687,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5670,10 +5871,9 @@ def test_list_associations_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPoliciesListAssociationsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPoliciesListAssociationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5821,8 +6021,9 @@ def test_move_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5930,8 +6131,9 @@ def test_move_rest_required_fields(request_type=compute.MoveFirewallPolicyReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6073,8 +6275,9 @@ def test_move_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6161,8 +6364,9 @@ def test_move_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6250,8 +6454,9 @@ def test_move_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6393,8 +6598,9 @@ def test_move_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6534,6 +6740,77 @@ def test_patch_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
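Each of these test bodies takes `request_type` as a parameter because the generated tests are parametrized over both the proto-plus request class and a plain dict, and the same keyword layout must construct either one. A minimal, hypothetical sketch of that pattern (not a test from the diff):

```python
import pytest
from google.cloud.compute_v1.types import compute

@pytest.mark.parametrize("request_type", [compute.PatchFirewallPolicyRequest, dict])
def test_request_init_accepts_both_shapes(request_type):
    # Both the proto-plus class and a plain dict accept the same kwargs.
    request = request_type(**{"firewall_policy": "sample1"})
    if isinstance(request, dict):
        assert request["firewall_policy"] == "sample1"
    else:
        assert request.firewall_policy == "sample1"
```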
@@ -6567,8 +6844,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6665,8 +6943,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchFirewallPolicyRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6760,92 +7039,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -6890,8 +7083,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7037,6 +7231,77 @@ def test_patch_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. @@ -7070,8 +7335,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7148,8 +7414,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7213,122 +7480,36 @@ def test_patch_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchFirewallPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.patch_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchFirewallPolicyRequest -): - client = FirewallPoliciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - 
"src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } + request = compute.PatchFirewallPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7373,8 +7554,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7485,6 +7667,81 @@ def test_patch_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
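get_message_fields is how the generated block tells a proto-plus composite field apart from a raw protobuf one: proto-plus message classes describe themselves through .meta.fields, while *_pb2 classes expose a DESCRIPTOR. A standalone sketch of the same check, assuming google-cloud-compute is importable; the request and field names are the ones used in the hunk above:

    from google.cloud.compute_v1.types import compute

    def get_message_fields(field):
        # Return the sub-fields of a composite (message-typed) field,
        # or an empty list for scalar fields.
        if not (hasattr(field, "message") and field.message):
            return []
        if hasattr(field.message, "DESCRIPTOR"):
            # Raw protobuf (*_pb2) message type.
            return list(field.message.DESCRIPTOR.fields)
        # proto-plus wrapper type.
        return list(field.message.meta.fields.values())

    rule_field = compute.PatchRuleFirewallPolicyRequest.meta.fields[
        "firewall_policy_rule_resource"
    ]
    print([f.name for f in get_message_fields(rule_field)])  # e.g. ['action', 'description', ...]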
@@ -7518,8 +7775,9 @@ def test_patch_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7623,8 +7881,9 @@ def test_patch_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7723,57 +7982,6 @@ def test_patch_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
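The recurring change in the hunks above is the serialization step: the pb_return_value alias is gone and return_value is converted in place before json_format.MessageToJson is called. A minimal sketch of that step, assuming google-cloud-compute (a proto-plus library) and protobuf are importable; the Operation name used here is illustrative:

    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute

    # A proto-plus message standing in for the mocked return value.
    return_value = compute.Operation(name="operation-123")

    # Convert return value to protobuf type: json_format only understands raw
    # protobuf messages, so the proto-plus wrapper is unwrapped via the
    # class-level pb() helper, reusing the same variable name.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)

    print(json_return_value)  # the JSON body the faked Response will carry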
@@ -7814,8 +8022,9 @@ def test_patch_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7922,6 +8131,81 @@ def test_patch_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
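The pruning block is inlined verbatim for every request type, but the idea is generic: given one resource dict from the sample request and the set of (field, subfield) pairs the installed library still defines, drop every subfield the runtime no longer knows, descending into list elements for repeated message fields. A hedged sketch of that idea as a helper (prune_stale_subfields is not part of the generated code, purely an illustration):

    def prune_stale_subfields(sample, runtime_nested_fields):
        # sample: one resource dict, e.g. request_init["firewall_policy_rule_resource"]
        # runtime_nested_fields: {(field, subfield)} pairs present at runtime
        for field, value in list(sample.items()):
            if isinstance(value, list) and value and isinstance(value[0], dict):
                items = value              # repeated message field
            elif isinstance(value, dict):
                items = [value]            # singular message field
            else:
                continue                   # scalar or repeated scalar field
            for item in items:
                for subfield in list(item.keys()):
                    if (field, subfield) not in runtime_nested_fields:
                        del item[subfield]

    sample = {"match": {"dest_fqdns": ["a"], "retired_key": 1}, "priority": 898}
    prune_stale_subfields(sample, {("match", "dest_fqdns")})
    print(sample)  # {'match': {'dest_fqdns': ['a']}, 'priority': 898}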
@@ -7955,8 +8239,9 @@ def test_patch_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8038,8 +8323,9 @@ def test_patch_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8138,57 +8424,6 @@ def test_patch_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"firewall_policy": "sample1"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
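The *_rest_bad_request tests lose their large resource payloads in these hunks: the error path they exercise never depends on the request body, and keeping a hand-written body around would reintroduce the same version-skew problem the pruning block solves elsewhere, so only the routing field ("firewall_policy": "sample1") is kept. A hedged sketch of the shape of that test, assuming google-api-core, requests and pytest are available; the direct call to from_http_response stands in for the client call the real test makes against a patched session:

    import pytest
    from requests import Response
    from requests.models import PreparedRequest
    from google.api_core import exceptions as core_exceptions
    from google.cloud.compute_v1.types import compute

    # Only the routing field is populated; no resource body is needed.
    request = compute.PatchRuleFirewallPolicyRequest(firewall_policy="sample1")

    # A faked 400 response is all the error path requires.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = PreparedRequest()

    with pytest.raises(core_exceptions.BadRequest):
        # The transport maps 4xx responses to google.api_core exceptions;
        # from_http_response performs that same mapping directly.
        raise core_exceptions.from_http_response(response_value)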
@@ -8229,8 +8464,9 @@ def test_patch_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8319,8 +8555,9 @@ def test_remove_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8423,8 +8660,9 @@ def test_remove_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8555,8 +8793,9 @@ def test_remove_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8642,8 +8881,9 @@ def test_remove_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8724,8 +8964,9 @@ def test_remove_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8856,8 +9097,9 @@ def test_remove_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8943,8 +9185,9 @@ def test_remove_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9047,8 +9290,9 @@ def test_remove_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9179,8 +9423,9 @@ def test_remove_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9266,8 +9511,9 @@ def test_remove_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9348,8 +9594,9 @@ def test_remove_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9480,8 +9727,9 @@ def test_remove_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9611,6 +9859,88 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyFirewallPolicyRequest.meta.fields[ + "global_organization_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_organization_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["global_organization_set_policy_request_resource"][ + field + ] + ), + ): + del request_init["global_organization_set_policy_request_resource"][ + field + ][i][subfield] + else: + del request_init["global_organization_set_policy_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
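Note that runtime_nested_fields only goes one level deep: it pairs each top-level field of global_organization_set_policy_request_resource with that field's own sub-field names, so keys nested deeper (for example the ones inside "condition" under "bindings") are never pruned. A hedged sketch of how those pairs are built for this request type, with a condensed version of the helper from the hunk above:

    from google.cloud.compute_v1.types import compute

    def get_message_fields(field):
        # Condensed version of the helper above: proto-plus types carry .meta,
        # raw protobuf (*_pb2) types carry DESCRIPTOR.
        msg = getattr(field, "message", None)
        if not msg:
            return []
        return list(msg.DESCRIPTOR.fields) if hasattr(msg, "DESCRIPTOR") else list(msg.meta.fields.values())

    test_field = compute.SetIamPolicyFirewallPolicyRequest.meta.fields[
        "global_organization_set_policy_request_resource"
    ]
    runtime_nested_fields = {
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)       # e.g. bindings, etag, policy
        for nested_field in get_message_fields(field)     # e.g. ("bindings", "members")
    }
    print(sorted(runtime_nested_fields)[:5])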
@@ -9625,8 +9955,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9704,8 +10035,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9799,83 +10131,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"resource": "sample1"} - request_init["global_organization_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9916,8 +10171,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9976,6 +10232,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsFirewallPolicyRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -9988,8 +10319,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10065,8 +10397,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10163,9 +10496,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"resource": "sample1"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10206,8 +10536,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_firewalls.py b/tests/unit/gapic/compute_v1/test_firewalls.py index affe0c85..1511e3a0 100644 --- a/tests/unit/gapic/compute_v1/test_firewalls.py +++ b/tests/unit/gapic/compute_v1/test_firewalls.py @@ -593,8 +593,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -694,8 +695,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteFirewallRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -823,8 +825,9 @@ def test_delete_rest_flattened(): # Wrap the value 
into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -911,8 +914,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -990,8 +994,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteFirewallRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1119,8 +1124,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1201,8 +1207,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Firewall.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1294,8 +1301,9 @@ def test_get_rest_required_fields(request_type=compute.GetFirewallRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Firewall.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1423,8 +1431,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Firewall.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1518,6 +1527,73 @@ def test_insert_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
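The same guard now wraps the Firewall sample in test_firewalls.py. What the comparison ultimately keys off is the field list of the installed library: the nested keys of the sample (for example the keys inside each "allowed" entry) are checked against names taken from compute.Firewall's own metadata. A hedged sketch of where that list comes from, assuming google-cloud-compute is importable; the printed names are whatever the installed version ships:

    from google.cloud.compute_v1.types import compute

    firewall_field = compute.InsertFirewallRequest.meta.fields["firewall_resource"]

    # Top-level fields of Firewall as the installed library defines them.
    runtime_fields = sorted(f.name for f in firewall_field.message.meta.fields.values())
    print(runtime_fields)  # e.g. ['allowed', 'creation_timestamp', 'denied', ...]

    # Sub-fields of one nested message, the list the pruning loop compares against.
    allowed_field = firewall_field.message.meta.fields["allowed"]
    print(sorted(f.name for f in allowed_field.message.meta.fields.values()))  # e.g. ['I_p_protocol', 'ports']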
@@ -1551,8 +1627,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1649,8 +1726,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertFirewallRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1740,46 +1818,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1820,8 +1858,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1917,6 +1956,73 @@ def test_insert_unary_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1950,8 +2056,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2026,8 +2133,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertFirewallRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2117,46 +2225,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
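All of these hunks sit inside the same response-faking scaffold: a bare requests.Response gets a 200 status, its private _content attribute is set to the UTF-8 encoded JSON, and that object becomes req.return_value on the patched session. A hedged, minimal reconstruction of that scaffold; patching Session.request directly is a simplification of the exact object the generated test patches:

    from unittest import mock
    from requests import Response, Session
    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute

    return_value = compute.Operation(name="operation-123")

    # Wrap the value into a proper Response obj, as the tests do.
    response_value = Response()
    response_value.status_code = 200
    json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
    response_value._content = json_return_value.encode("UTF-8")

    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        # Any request made through the patched session now yields the fake body.
        assert Session().request("GET", "https://example.invalid").json()["name"] == "operation-123"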
@@ -2197,8 +2265,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2269,8 +2338,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2356,8 +2426,9 @@ def test_list_rest_required_fields(request_type=compute.ListFirewallsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2487,8 +2558,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2636,6 +2708,73 @@ def test_patch_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
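The reason stale keys must be stripped before request = request_type(**request_init) is that the message constructor rejects unknown field names outright: a sample dict generated against a newer proto definition would make the whole test error out instead of silently ignoring the extra data, which is exactly the skew described in gapic-generator-python issue 1748. A hedged illustration (the bogus key is invented for the example, and the exact exception type may vary across proto-plus/protobuf versions):

    from google.cloud.compute_v1.types import compute

    try:
        # "not_a_real_field" stands in for a field a newer generator knew about
        # but the installed library does not.
        compute.Firewall(name="name_value", not_a_real_field="boom")
    except (TypeError, ValueError) as exc:
        print(f"rejected as expected: {exc}")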
@@ -2669,8 +2808,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2771,8 +2911,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchFirewallRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2863,46 +3004,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall": "sample2"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2944,8 +3045,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3042,6 +3144,73 @@ def test_patch_unary_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3075,8 +3244,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3155,8 +3325,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchFirewallRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3247,46 +3418,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall": "sample2"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3328,8 +3459,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3426,6 +3558,73 @@ def test_update_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3459,8 +3658,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3561,8 +3761,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateFirewallRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3653,46 +3854,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall": "sample2"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3734,8 +3895,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3832,6 +3994,73 @@ def test_update_unary_rest(request_type): ], "target_tags": ["target_tags_value1", "target_tags_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateFirewallRequest.meta.fields["firewall_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_resource"][field])): + del request_init["firewall_resource"][field][i][subfield] + else: + del request_init["firewall_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3865,8 +4094,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3945,8 +4175,9 @@ def test_update_unary_rest_required_fields(request_type=compute.UpdateFirewallRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4037,46 +4268,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall": "sample2"} - request_init["firewall_resource"] = { - "allowed": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "creation_timestamp": "creation_timestamp_value", - "denied": [ - { - "I_p_protocol": "I_p_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "description": "description_value", - "destination_ranges": [ - "destination_ranges_value1", - "destination_ranges_value2", - ], - "direction": "direction_value", - "disabled": True, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True, "metadata": "metadata_value"}, - "name": "name_value", - "network": "network_value", - "priority": 898, - "self_link": "self_link_value", - "source_ranges": ["source_ranges_value1", "source_ranges_value2"], - "source_service_accounts": [ - "source_service_accounts_value1", - "source_service_accounts_value2", - ], - "source_tags": ["source_tags_value1", "source_tags_value2"], - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - "target_tags": ["target_tags_value1", "target_tags_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
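
Across these hunks the only behavioural change to the response mocks is renaming `pb_return_value` to `return_value` around the proto-plus to protobuf conversion. A minimal sketch of that pattern, assuming `google-cloud-compute` and `requests` are installed and using an illustrative `Operation` value:

```python
from google.cloud import compute_v1
from google.protobuf import json_format
from requests import Response

# Build a proto-plus return value, then unwrap it into its underlying *_pb2
# message with `.pb()`, since json_format.MessageToJson serializes protobuf
# messages rather than proto-plus wrappers.
return_value = compute_v1.Operation(name="operation-1")  # field value is illustrative
pb_message = compute_v1.Operation.pb(return_value)

# Fake the REST response the transport would receive.
response_value = Response()
response_value.status_code = 200
response_value._content = json_format.MessageToJson(pb_message).encode("UTF-8")
```

The generated tests reuse the `return_value` name for the unwrapped message; the sketch keeps a separate `pb_message` only for readability.
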
@@ -4118,8 +4309,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 22cea3c4..26028c27 100644 --- a/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -603,8 +603,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -694,8 +695,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -832,8 +834,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -992,8 +995,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1097,8 +1101,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteForwardingRuleRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -1240,8 +1245,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1333,8 +1339,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1418,8 +1425,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1561,8 +1569,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1663,8 +1672,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1775,8 +1785,9 @@ def test_get_rest_required_fields(request_type=compute.GetForwardingRuleRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1920,8 +1931,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2023,6 +2035,77 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
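
The `runtime_nested_fields` list in these blocks comes from introspecting the proto-plus request type at test time. A small sketch of that introspection, assuming `google-cloud-compute` is installed; the `Firewall`/`allowed` names are just one concrete choice of message and field:

```python
from google.cloud import compute_v1

# Proto-plus message classes expose their field descriptors via `.meta.fields`.
firewall_fields = compute_v1.Firewall.meta.fields

# A field whose type is itself a proto-plus message exposes that nested message
# class on `.message`, so its subfields can be listed the same way.
allowed_field = firewall_fields["allowed"]
nested_fields = allowed_field.message.meta.fields

print(sorted(firewall_fields))  # top-level Firewall field names at runtime
print(sorted(nested_fields))    # subfield names of the repeated `allowed` message
```

The generated helper additionally falls back to `field.message.DESCRIPTOR.fields` for plain protobuf (`*_pb2`) field types, which is why that branch carries the `# pragma: NO COVER` marker.
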
@@ -2056,8 +2139,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2158,8 +2242,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertForwardingRuleRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2254,53 +2339,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2342,8 +2380,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2447,6 +2486,77 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2480,8 +2590,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2562,8 +2673,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2658,53 +2770,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2746,8 +2811,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2819,8 +2885,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2910,8 +2977,9 @@ def test_list_rest_required_fields(request_type=compute.ListForwardingRulesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3053,8 +3121,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3214,6 +3283,77 @@ def test_patch_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3247,8 +3387,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3353,8 +3494,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchForwardingRuleRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3454,53 +3596,6 @@ def test_patch_rest_bad_request( "region": "sample2", "forwarding_rule": "sample3", } - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3547,8 +3642,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3657,6 +3753,77 @@ def test_patch_unary_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3690,8 +3857,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3776,8 +3944,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3877,53 +4046,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "forwarding_rule": "sample3", } - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3970,8 +4092,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4033,6 +4156,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsForwardingRuleRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. @@ -4066,8 +4264,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4174,8 +4373,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4271,10 +4471,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4321,8 +4517,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4384,6 +4581,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsForwardingRuleRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4417,8 +4689,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4503,8 +4776,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4600,10 +4874,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4650,8 +4920,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4714,6 +4985,79 @@ def test_set_target_rest(request_type): "forwarding_rule": "sample3", } request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetForwardingRuleRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4747,8 +5091,9 @@ def test_set_target_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4855,8 +5200,9 @@ def test_set_target_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4956,7 +5302,6 @@ def test_set_target_rest_bad_request( "region": "sample2", "forwarding_rule": "sample3", } - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5001,8 +5346,9 @@ def test_set_target_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5063,6 +5409,79 @@ def test_set_target_unary_rest(request_type): "forwarding_rule": "sample3", } request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetForwardingRuleRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
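The replacement applied throughout these hunks is the same two-line conversion; a minimal stand-alone sketch of what it does, assuming google-cloud-compute and protobuf are importable (the operation name is invented for illustration):

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Build a proto-plus message, unwrap it to the raw protobuf message with .pb(),
# then serialize it the same way the mocked REST response bodies are built.
return_value = compute.Operation(name="operation-1")
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
assert "operation-1" in json_return_value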
@@ -5096,8 +5515,9 @@ def test_set_target_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5182,8 +5602,9 @@ def test_set_target_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5283,7 +5704,6 @@ def test_set_target_unary_rest_bad_request( "region": "sample2", "forwarding_rule": "sample3", } - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5328,8 +5748,9 @@ def test_set_target_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_addresses.py b/tests/unit/gapic/compute_v1/test_global_addresses.py index 8c72f6f4..320cda98 100644 --- a/tests/unit/gapic/compute_v1/test_global_addresses.py +++ b/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -620,8 +620,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -721,8 +722,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteGlobalAddressReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -854,8 +856,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -942,8 +945,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1023,8 +1027,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1156,8 +1161,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1241,8 +1247,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1337,8 +1344,9 @@ def test_get_rest_required_fields(request_type=compute.GetGlobalAddressRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1470,8 +1478,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Address.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1547,6 +1556,73 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "users": ["users_value1", "users_value2"], } + # The version of a generated dependency at test runtime 
may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalAddressRequest.meta.fields["address_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["address_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["address_resource"][field])): + del request_init["address_resource"][field][i][subfield] + else: + del request_init["address_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1580,8 +1656,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1678,8 +1755,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertGlobalAddressReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1773,28 +1851,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["address_resource"] = { - "address": "address_value", - "address_type": "address_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "ip_version": "ip_version_value", - "ipv6_endpoint_type": "ipv6_endpoint_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "prefix_length": 1391, - "purpose": "purpose_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "subnetwork": "subnetwork_value", - "users": ["users_value1", "users_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1833,8 +1889,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1910,6 +1967,73 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "users": ["users_value1", "users_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalAddressRequest.meta.fields["address_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["address_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["address_resource"][field])): + del request_init["address_resource"][field][i][subfield] + else: + del request_init["address_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1943,8 +2067,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2021,8 +2146,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2116,28 +2242,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["address_resource"] = { - "address": "address_value", - "address_type": "address_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "ip_version": "ip_version_value", - "ipv6_endpoint_type": "ipv6_endpoint_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "prefix_length": 1391, - "purpose": "purpose_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "subnetwork": "subnetwork_value", - "users": ["users_value1", "users_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2176,8 +2280,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2246,8 +2351,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2333,8 +2439,9 @@ def test_list_rest_required_fields(request_type=compute.ListGlobalAddressesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2468,8 +2575,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.AddressList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2581,6 +2689,84 @@ def test_move_rest(request_type): "description": "description_value", "destination_address": "destination_address_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveGlobalAddressRequest.meta.fields[ + "global_addresses_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_addresses_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["global_addresses_move_request_resource"][field]), + ): + del request_init["global_addresses_move_request_resource"][field][ + i + ][subfield] + else: + del request_init["global_addresses_move_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
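The deletion loop added above is plain dictionary surgery; the following compact sketch shows the same idea on an invented sample request (the field names are illustrative, not taken from the Compute API):

# Keep only the nested keys that the runtime message definition knows about.
runtime_nested_fields = {("metadata_filters", "filter_labels")}
request_init = {
    "resource": {
        "metadata_filters": [{"filter_labels": [], "stale_key": 1}],
        "description": "description_value",
    }
}

for field, value in request_init["resource"].items():
    items = value if isinstance(value, list) else [value]
    for item in items:
        if isinstance(item, dict):
            for subfield in list(item.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del item[subfield]

# request_init is now {'resource': {'metadata_filters': [{'filter_labels': []}],
#                                   'description': 'description_value'}}
print(request_init)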
@@ -2614,8 +2800,9 @@ def test_move_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2716,8 +2903,9 @@ def test_move_rest_required_fields(request_type=compute.MoveGlobalAddressRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2812,10 +3000,6 @@ def test_move_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "address": "sample2"} - request_init["global_addresses_move_request_resource"] = { - "description": "description_value", - "destination_address": "destination_address_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2857,8 +3041,9 @@ def test_move_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2919,6 +3104,84 @@ def test_move_unary_rest(request_type): "description": "description_value", "destination_address": "destination_address_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveGlobalAddressRequest.meta.fields[ + "global_addresses_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_addresses_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["global_addresses_move_request_resource"][field]), + ): + del request_init["global_addresses_move_request_resource"][field][ + i + ][subfield] + else: + del request_init["global_addresses_move_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2952,8 +3215,9 @@ def test_move_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3032,8 +3296,9 @@ def test_move_unary_rest_required_fields(request_type=compute.MoveGlobalAddressR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3128,10 +3393,6 @@ def test_move_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "address": "sample2"} - request_init["global_addresses_move_request_resource"] = { - "description": "description_value", - "destination_address": "destination_address_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3173,8 +3434,9 @@ def test_move_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3235,6 +3497,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsGlobalAddressRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
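The get_message_fields helper hinges on whether a composite field's type is proto-plus or a raw *_pb2 class; a short sketch of that check against one of the request types used above (the expected output is noted as a comment, not a captured run):

from google.cloud.compute_v1.types import compute

field = compute.SetLabelsGlobalAddressRequest.meta.fields[
    "global_set_labels_request_resource"
]

# proto-plus message types expose their fields via .meta.fields; raw protobuf
# (*_pb2) message types expose a DESCRIPTOR instead.
if not hasattr(field.message, "DESCRIPTOR"):
    nested_names = [f.name for f in field.message.meta.fields.values()]
else:
    nested_names = [f.name for f in field.message.DESCRIPTOR.fields]

print(sorted(nested_names))  # expected to include 'label_fingerprint' and 'labels'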
@@ -3268,8 +3605,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3370,8 +3708,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3466,10 +3805,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3511,8 +3846,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3573,6 +3909,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsGlobalAddressRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3606,8 +4017,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3686,8 +4098,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3782,10 +4195,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3827,8 +4236,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py index 14a2ce6b..5f4c7f2d 100644 --- a/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py +++ b/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -634,8 +634,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -737,8 +738,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -870,8 +872,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value 
= json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -958,8 +961,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1039,8 +1043,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1172,8 +1177,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1269,8 +1275,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1377,8 +1384,9 @@ def test_get_rest_required_fields(request_type=compute.GetGlobalForwardingRuleRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1512,8 +1520,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1614,6 +1623,77 @@ def test_insert_rest(request_type): 
"subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1647,8 +1727,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1747,8 +1828,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1842,53 +1924,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
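Each of the response hunks here is the same mechanical change: the intermediate pb_return_value name is dropped and return_value is rebound to its protobuf form before JSON serialization, with a comment explaining the conversion. A small sketch of the resulting mock-response pattern, assuming google-cloud-compute and requests are installed (the Operation field used is illustrative):

from google.cloud import compute_v1 as compute
from google.protobuf import json_format
from requests import Response

# Payload the mocked transport will appear to return.
return_value = compute.Operation(name="name_value")

response_value = Response()
response_value.status_code = 200
# Convert return value to protobuf type before serializing, as in the updated tests.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")

In the generated tests this object is then wired up as req.return_value so the mocked session hands it back to the client under test.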
@@ -1929,8 +1964,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2033,6 +2069,77 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2066,8 +2173,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2144,8 +2252,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2239,53 +2348,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2326,8 +2388,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2398,8 +2461,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2487,8 +2551,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2624,8 +2689,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ForwardingRuleList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2780,6 +2846,77 @@ def test_patch_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchGlobalForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2813,8 +2950,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2917,8 +3055,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3013,53 +3152,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
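The bad-request hunks above only delete the hand-written resource dicts: the error-path tests never serialize a body, so the request can be built from the path parameters alone and the nested resource message is left at its default. A hedged sketch of what the trimmed setup amounts to:

from google.cloud import compute_v1 as compute

# Only the fields needed to satisfy transcoding are supplied; the nested
# forwarding_rule_resource message stays at its default (empty) value.
request_init = {"project": "sample1", "forwarding_rule": "sample2"}
request = compute.PatchGlobalForwardingRuleRequest(**request_init)

assert request.project == "sample1"
assert request.forwarding_rule == "sample2"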
@@ -3101,8 +3193,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3206,6 +3299,77 @@ def test_patch_unary_rest(request_type): "subnetwork": "subnetwork_value", "target": "target_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchGlobalForwardingRuleRequest.meta.fields[ + "forwarding_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "forwarding_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["forwarding_rule_resource"][field])): + del request_init["forwarding_rule_resource"][field][i][subfield] + else: + del request_init["forwarding_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3239,8 +3403,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3321,8 +3486,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3417,53 +3583,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} - request_init["forwarding_rule_resource"] = { - "I_p_address": "I_p_address_value", - "I_p_protocol": "I_p_protocol_value", - "all_ports": True, - "allow_global_access": True, - "allow_psc_global_access": True, - "backend_service": "backend_service_value", - "base_forwarding_rule": "base_forwarding_rule_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_version": "ip_version_value", - "is_mirroring_collector": True, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "load_balancing_scheme": "load_balancing_scheme_value", - "metadata_filters": [ - { - "filter_labels": [{"name": "name_value", "value": "value_value"}], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "name": "name_value", - "network": "network_value", - "network_tier": "network_tier_value", - "no_automate_dns_zone": True, - "port_range": "port_range_value", - "ports": ["ports_value1", "ports_value2"], - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - "region": "region_value", - "self_link": "self_link_value", - "service_directory_registrations": [ - { - "namespace": "namespace_value", - "service": "service_value", - "service_directory_region": "service_directory_region_value", - } - ], - "service_label": "service_label_value", - "service_name": "service_name_value", - "source_ip_ranges": ["source_ip_ranges_value1", "source_ip_ranges_value2"], - "subnetwork": "subnetwork_value", - "target": "target_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3505,8 +3624,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3567,6 +3687,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsGlobalForwardingRuleRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake 
a response. @@ -3600,8 +3795,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3702,8 +3898,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3798,10 +3995,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3843,8 +4036,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3905,6 +4099,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsGlobalForwardingRuleRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
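The get_message_fields helper repeated in each of these hunks decides whether a field's message type is a proto-plus wrapper or a raw protobuf class by checking for a DESCRIPTOR attribute, then returns the matching field list. The following is a condensed restatement of that generated helper (coverage pragmas omitted), applied to one of the request types used in these tests, assuming google-cloud-compute is installed:

from google.cloud import compute_v1 as compute

def get_message_fields(field):
    """Return the nested fields of a message-typed field, or [] otherwise."""
    message_fields = []
    if hasattr(field, "message") and field.message:
        # proto-plus wrappers expose .meta.fields; raw protobuf classes expose .DESCRIPTOR.
        if not hasattr(field.message, "DESCRIPTOR"):
            message_fields = field.message.meta.fields.values()
        else:
            message_fields = field.message.DESCRIPTOR.fields
    return message_fields

test_field = compute.SetLabelsGlobalForwardingRuleRequest.meta.fields[
    "global_set_labels_request_resource"
]
# (top-level field name, nested field name) pairs known to the installed runtime.
runtime_nested_fields = [
    (field.name, nested_field.name)
    for field in get_message_fields(test_field)
    for nested_field in get_message_fields(field)
]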
@@ -3938,8 +4207,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4018,8 +4288,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4114,10 +4385,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4159,8 +4426,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4218,6 +4486,79 @@ def test_set_target_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetGlobalForwardingRuleRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4251,8 +4592,9 @@ def test_set_target_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4355,8 +4697,9 @@ def test_set_target_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4451,7 +4794,6 @@ def test_set_target_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4491,8 +4833,9 @@ def test_set_target_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4548,6 +4891,79 @@ def test_set_target_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetGlobalForwardingRuleRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
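These set_target hunks prune the single-field target_reference_resource dict the same way. The reason the tests can pass plain dicts at all is that proto-plus request constructors coerce a dict supplied for a message-typed field into that nested message. A small sketch, assuming google-cloud-compute is installed:

from google.cloud import compute_v1 as compute

request = compute.SetTargetGlobalForwardingRuleRequest(
    project="sample1",
    forwarding_rule="sample2",
    # The dict is coerced into the nested target-reference message type.
    target_reference_resource={"target": "target_value"},
)
assert request.target_reference_resource.target == "target_value"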
@@ -4581,8 +4997,9 @@ def test_set_target_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4663,8 +5080,9 @@ def test_set_target_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4759,7 +5177,6 @@ def test_set_target_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "forwarding_rule": "sample2"} - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4799,8 +5216,9 @@ def test_set_target_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py index af2f9230..009748ca 100644 --- a/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -620,6 +620,90 @@ def test_attach_network_endpoints_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.meta.fields[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -653,8 +737,9 @@ def test_attach_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -757,8 +842,9 @@ def test_attach_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -856,17 +942,6 @@ def test_attach_network_endpoints_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network_endpoint_group": "sample2"} - request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -910,8 +985,9 @@ def test_attach_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -981,6 +1057,90 @@ def test_attach_network_endpoints_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.meta.fields[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
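For the network endpoint group tests the pruned resource contains a repeated network_endpoints message field, so the deletion step has to walk every list element. A toy sketch of that repeated-field branch, using the sample payload from the hunk and a hypothetical stale subfield:

# Sample payload copied from the test; "stale_field" is hypothetical and
# stands in for a subfield the installed runtime no longer defines.
resource_key = "global_network_endpoint_groups_attach_endpoints_request_resource"
request_init = {
    resource_key: {
        "network_endpoints": [
            {
                "annotations": {},
                "fqdn": "fqdn_value",
                "instance": "instance_value",
                "ip_address": "ip_address_value",
                "port": 453,
                "stale_field": "stale",
            }
        ]
    }
}

field, subfield = "network_endpoints", "stale_field"
# Repeated branch: delete the stale subfield from every element of the list.
for i in range(0, len(request_init[resource_key][field])):
    del request_init[resource_key][field][i][subfield]

assert "stale_field" not in request_init[resource_key][field][0]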
@@ -1014,8 +1174,9 @@ def test_attach_network_endpoints_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1096,8 +1257,9 @@ def test_attach_network_endpoints_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1195,17 +1357,6 @@ def test_attach_network_endpoints_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network_endpoint_group": "sample2"} - request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1249,8 +1400,9 @@ def test_attach_network_endpoints_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1342,8 +1494,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1445,8 +1598,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1579,8 +1733,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1667,8 +1822,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1748,8 +1904,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1882,8 +2039,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1948,6 +2106,90 @@ def test_detach_network_endpoints_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.meta.fields[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
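Reviewer note: get_message_fields above is how the generated test decides whether a field's message type is a proto-plus wrapper (schema under .meta.fields) or a raw *_pb2 class (schema under DESCRIPTOR.fields). A compact sketch of the same check applied to a whole message class, under the assumption that only those two shapes occur; list_field_names is a hypothetical helper, not part of the generated code:

    def list_field_names(message_cls):
        # Raw protobuf (*_pb2) classes expose their schema via DESCRIPTOR;
        # proto-plus wrappers expose it via the .meta.fields mapping instead.
        if hasattr(message_cls, "DESCRIPTOR"):
            return [field.name for field in message_cls.DESCRIPTOR.fields]
        return list(message_cls.meta.fields.keys())

Either branch yields the declared field names of the message, whichever runtime flavor happens to be installed.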
@@ -1981,8 +2223,9 @@ def test_detach_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2085,8 +2328,9 @@ def test_detach_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2184,17 +2428,6 @@ def test_detach_network_endpoints_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network_endpoint_group": "sample2"} - request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2238,8 +2471,9 @@ def test_detach_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2309,6 +2543,90 @@ def test_detach_network_endpoints_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.meta.fields[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
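Reviewer note: every hunk that renames pb_return_value to return_value is the same mechanical change: json_format.MessageToJson needs the underlying protobuf message, so the proto-plus value is passed through the type's .pb() classmethod and the intermediate variable is dropped. A minimal sketch of the response-faking pattern these tests rely on, assuming the compute alias and Response import used throughout this module; the bare mock object stands in for the patched session and is illustrative only:

    from unittest import mock

    from google.protobuf import json_format
    from requests import Response

    from google.cloud import compute_v1 as compute  # alias assumed to match the tests

    return_value = compute.Operation(name="operation-1")

    # Wrap the value into a proper Response obj, as the generated tests do.
    response_value = Response()
    response_value.status_code = 200
    # Convert the proto-plus wrapper to its raw protobuf message before JSON-encoding.
    json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
    response_value._content = json_return_value.encode("UTF-8")

    req = mock.Mock()
    req.return_value = response_value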
@@ -2342,8 +2660,9 @@ def test_detach_network_endpoints_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2424,8 +2743,9 @@ def test_detach_network_endpoints_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2523,17 +2843,6 @@ def test_detach_network_endpoints_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network_endpoint_group": "sample2"} - request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2577,8 +2886,9 @@ def test_detach_network_endpoints_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2662,8 +2972,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2755,8 +3066,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2890,8 +3202,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + 
# Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2978,6 +3291,81 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
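Reviewer note: as a toy illustration of what the pruning above accomplishes, suppose the installed runtime only knew about app_engine.service; the pair set below is invented for the example and far smaller than the real runtime_nested_fields:

    # Hypothetical, deliberately incomplete set of runtime-known (field, subfield) pairs.
    runtime_nested_fields = {("app_engine", "service")}

    resource = {"app_engine": {"service": "service_value", "url_mask": "url_mask_value"}}

    for field, value in resource.items():
        if isinstance(value, dict):
            for subfield in list(value.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del value[subfield]

    # url_mask is not in the (pretend) runtime schema, so it has been dropped.
    assert resource == {"app_engine": {"service": "service_value"}}

In practice nothing is usually dropped; the branch only matters when the protobuf runtime at test time is older than the one the library was generated against, which is why the loops carry `# pragma: NO COVER`.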
@@ -3011,8 +3399,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3111,8 +3500,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3207,39 +3597,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3280,8 +3637,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3370,6 +3728,81 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3403,8 +3836,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3481,8 +3915,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3577,39 +4012,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
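Reviewer note: the *_rest_bad_request hunks go the other way and simply delete the resource body from request_init. Those tests only need path parameters that satisfy transcoding before the mocked 400 response short-circuits the call, so dropping the body also keeps them independent of the runtime-version pruning. The resulting setup reduces to something like the following sketch (client construction and session patching elided; only names present in this diff are used):

    from google.cloud import compute_v1 as compute  # alias assumed to match the tests

    request_init = {"project": "sample1"}
    # The resource body is no longer populated: only the path parameter is needed
    # to satisfy transcoding before the mocked 400 response is returned.
    request = compute.InsertGlobalNetworkEndpointGroupRequest(**request_init)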
@@ -3650,8 +4052,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3722,8 +4125,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3811,8 +4215,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3948,8 +4353,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4071,10 +4477,11 @@ def test_list_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4165,10 +4572,11 @@ def test_list_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4315,10 +4723,11 @@ def test_list_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_operations.py b/tests/unit/gapic/compute_v1/test_global_operations.py index 35bd6d8b..1e0921d9 100644 --- a/tests/unit/gapic/compute_v1/test_global_operations.py +++ b/tests/unit/gapic/compute_v1/test_global_operations.py @@ -602,8 +602,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -693,8 +694,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -831,8 +833,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -959,8 +962,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1036,8 +1040,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteGlobalOperationR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1171,8 +1176,9 @@ def test_delete_rest_flattened(): # Wrap the 
value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1259,8 +1265,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1358,8 +1365,9 @@ def test_get_rest_required_fields(request_type=compute.GetGlobalOperationRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1491,8 +1499,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1561,8 +1570,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1648,8 +1658,9 @@ def test_list_rest_required_fields(request_type=compute.ListGlobalOperationsRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1785,8 +1796,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1927,8 +1939,9 @@ def test_wait_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2026,8 +2039,9 @@ def test_wait_rest_required_fields(request_type=compute.WaitGlobalOperationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2159,8 +2173,9 @@ def test_wait_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/tests/unit/gapic/compute_v1/test_global_organization_operations.py index 23cdd5f6..4c45a951 100644 --- a/tests/unit/gapic/compute_v1/test_global_organization_operations.py +++ b/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -612,10 +612,11 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb( + # Convert return value to protobuf type + return_value = compute.DeleteGlobalOrganizationOperationResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -691,10 +692,11 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb( + # Convert return value to protobuf type + return_value = compute.DeleteGlobalOrganizationOperationResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -822,10 +824,11 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb( + # Convert return value to protobuf type + return_value = 
compute.DeleteGlobalOrganizationOperationResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -911,8 +914,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1010,8 +1014,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1134,8 +1139,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1203,8 +1209,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py index d1eb61b2..08152c25 100644 --- a/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -644,8 +644,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -747,8 +748,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -881,8 +883,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -969,8 +972,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1050,8 +1054,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1184,8 +1189,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1262,8 +1268,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1353,8 +1360,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1488,8 +1496,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1568,6 +1577,81 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
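Reviewer note: test_field is looked up through the proto-plus metadata on the request class (.meta.fields), the same mapping get_message_fields falls back to for proto-plus types. A small sketch of inspecting that metadata directly, assuming the import alias used by the tests; the printout is illustrative only:

    from google.cloud import compute_v1 as compute  # alias assumed to match the tests

    fields = compute.InsertGlobalPublicDelegatedPrefixeRequest.meta.fields
    for name, field in fields.items():
        # field.message is populated only for message-typed (composite) fields,
        # which is exactly the check get_message_fields performs above.
        is_composite = bool(getattr(field, "message", None))
        print(f"{name}: composite={is_composite}")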
@@ -1601,8 +1685,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1701,8 +1786,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1797,31 +1883,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1862,8 +1923,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1944,6 +2006,81 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertGlobalPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1977,8 +2114,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2055,8 +2193,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2151,31 +2290,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2216,8 +2330,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2288,8 +2403,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2377,8 +2493,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2515,8 +2632,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2649,6 +2767,81 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchGlobalPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
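The get_message_fields helper repeated in these blocks hinges on a single check: a proto-plus message type exposes its schema through meta.fields, while a raw *_pb2 message type exposes it through DESCRIPTOR.fields. A standalone restatement that returns just the field names, mirroring the hasattr test the generated code relies on; the function name is illustrative:

def runtime_field_names(message_cls):
    # Raw protobuf (*_pb2) classes carry a DESCRIPTOR; the proto-plus wrappers used in
    # these tests do not, so their fields are read from the `meta.fields` mapping instead.
    if hasattr(message_cls, "DESCRIPTOR"):
        return [f.name for f in message_cls.DESCRIPTOR.fields]
    return list(message_cls.meta.fields)

# e.g. runtime_field_names(compute.HealthCheck) lists whichever fields the installed
# google-cloud-compute version actually defines.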
@@ -2682,8 +2875,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2786,8 +2980,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2883,31 +3078,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2949,8 +3119,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3032,6 +3203,81 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchGlobalPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3065,8 +3311,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3147,8 +3394,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3244,31 +3492,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3310,8 +3533,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_health_checks.py b/tests/unit/gapic/compute_v1/test_health_checks.py index efb2c3f4..4993fcdb 100644 --- a/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/tests/unit/gapic/compute_v1/test_health_checks.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -965,8 +968,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1066,8 +1070,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteHealthCheckReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1199,8 
+1204,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1287,8 +1293,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1368,8 +1375,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1501,8 +1509,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1579,8 +1588,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1668,8 +1678,9 @@ def test_get_rest_required_fields(request_type=compute.GetHealthCheckRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1799,8 +1810,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1918,6 +1930,75 @@ def test_insert_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
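Taken together, the pruning blocks compute runtime_nested_fields in two passes: first the message-typed fields of the resource message, then the fields of each of those nested messages, yielding the (field, subfield) pairs that the deletion loop checks the sample request against. A short sketch of that step, phrased in terms of the generated get_message_fields helper shown above and the illustrative prune_unknown_subfields from the earlier sketch; the combining function and its wiring are hypothetical:

def runtime_nested_pairs(request_cls, resource_field):
    # (field, subfield) pairs that the installed runtime defines for the message stored
    # under `resource_field` of the proto-plus request type `request_cls`.
    top_field = request_cls.meta.fields[resource_field]
    pairs = set()
    for field in get_message_fields(top_field):
        for nested in get_message_fields(field):
            pairs.add((field.name, nested.name))
    return pairs

# Hypothetical wiring, matching what the generated block does inline:
# prune_unknown_subfields(
#     request_init["health_check_resource"],
#     runtime_nested_pairs(compute.InsertHealthCheckRequest, "health_check_resource"),
# )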
@@ -1951,8 +2032,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2049,8 +2131,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertHealthCheckReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2144,70 +2227,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2246,8 +2265,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2365,6 +2385,75 @@ def test_insert_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2398,8 +2487,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2476,8 +2566,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2571,70 +2662,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
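In the *_rest_bad_request tests the hunks above drop the whole resource body from request_init instead of adding a pruning block; presumably only the identifying fields are needed to satisfy transcoding before the mocked 400 response short-circuits the call. For orientation, the error path in these generated tests has roughly the following shape; this is a sketch under the assumption of the usual api_core status mapping, not text copied from this diff:

from google.api_core import exceptions as core_exceptions
from requests import Request, Response

def fake_bad_request_response():
    # A 400 response with an empty body; api_core maps HTTP 400 to BadRequest.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    return response_value

# In the tests, the patched session returns this response and the call is expected to fail:
#     req.return_value = fake_bad_request_response()
#     with pytest.raises(core_exceptions.BadRequest):
#         client.insert_unary(request)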
@@ -2673,8 +2700,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2743,8 +2771,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2830,8 +2859,9 @@ def test_list_rest_required_fields(request_type=compute.ListHealthChecksRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2967,8 +2997,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3140,6 +3171,75 @@ def test_patch_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3173,8 +3273,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3275,8 +3376,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchHealthCheckRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3371,70 +3473,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "health_check": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3474,8 +3512,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3594,6 +3633,75 @@ def test_patch_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3627,8 +3735,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3707,8 +3816,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchHealthCheckR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3803,70 +3913,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "health_check": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3906,8 +3952,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4026,6 +4073,75 @@ def test_update_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4059,8 +4175,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4161,8 +4278,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateHealthCheckReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4257,70 +4375,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "health_check": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4360,8 +4414,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4480,6 +4535,75 @@ def test_update_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateHealthCheckRequest.meta.fields["health_check_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
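Each of these *_rest tests is parametrized over both the proto-plus request type and a plain dict, so the same request_init exercises the typed path as well as the dict-to-message coercion path. The decorator shape, shown here as the standard gapic-generator pattern rather than a quote from this diff:

import pytest
from google.cloud.compute_v1.types import compute

@pytest.mark.parametrize(
    "request_type",
    [
        compute.UpdateHealthCheckRequest,
        dict,
    ],
)
def test_update_rest(request_type):
    request_init = {"project": "sample1", "health_check": "sample2"}
    # request_type(**request_init) yields either a proto-plus message or a plain dict;
    # the client method accepts both forms.
    request = request_type(**request_init)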
@@ -4513,8 +4637,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4595,8 +4720,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4691,70 +4817,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "health_check": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4794,8 +4856,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_image_family_views.py b/tests/unit/gapic/compute_v1/test_image_family_views.py index 3002a4d2..bb581a93 100644 --- a/tests/unit/gapic/compute_v1/test_image_family_views.py +++ b/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -595,8 +595,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageFamilyView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -676,8 +677,9 @@ def test_get_rest_required_fields(request_type=compute.GetImageFamilyViewRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageFamilyView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -813,8 +815,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageFamilyView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_images.py b/tests/unit/gapic/compute_v1/test_images.py index 8d21fad0..b8ed4020 100644 --- a/tests/unit/gapic/compute_v1/test_images.py +++ b/tests/unit/gapic/compute_v1/test_images.py @@ -586,8 +586,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -687,8 +688,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteImageRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -816,8 +818,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -904,8 +907,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +987,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteImageReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1112,8 +1117,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1174,6 +1180,79 @@ def test_deprecate_rest(request_type): "replacement": "replacement_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeprecateImageRequest.meta.fields[ + "deprecation_status_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "deprecation_status_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["deprecation_status_resource"][field]) + ): + del request_init["deprecation_status_resource"][field][i][subfield] + else: + del request_init["deprecation_status_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
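Once `runtime_nested_fields` is known, the generated tests drop any (field, subfield) entry from the hand-written sample body that the installed version of the dependency does not define, so a test generated against a newer proto still builds a valid request. A stripped-down illustration of the singular-field case, with hand-picked field names and an assumed `runtime_nested_fields`:

# Sample body as written at generation time; pretend the installed library
# does not know about the "response" subfield of "http_health_check".
request_init = {
    "health_check_resource": {
        "check_interval_sec": 1884,  # scalar value, never pruned
        "http_health_check": {"host": "host_value", "response": "response_value"},
    }
}
runtime_nested_fields = [("http_health_check", "host")]  # assumed introspection result

subfields_not_in_runtime = []
for field, value in request_init["health_check_resource"].items():
    if isinstance(value, dict):  # only message-typed entries carry subfields
        for subfield in value:
            if (field, subfield) not in runtime_nested_fields:
                subfields_not_in_runtime.append((field, subfield))

for field, subfield in subfields_not_in_runtime:
    del request_init["health_check_resource"][field][subfield]

# request_init now only mentions subfields the runtime dependency understands.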
@@ -1207,8 +1286,9 @@ def test_deprecate_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1309,8 +1389,9 @@ def test_deprecate_rest_required_fields(request_type=compute.DeprecateImageReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1401,13 +1482,6 @@ def test_deprecate_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} - request_init["deprecation_status_resource"] = { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1449,8 +1523,9 @@ def test_deprecate_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1514,6 +1589,79 @@ def test_deprecate_unary_rest(request_type): "replacement": "replacement_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeprecateImageRequest.meta.fields[ + "deprecation_status_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "deprecation_status_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["deprecation_status_resource"][field]) + ): + del request_init["deprecation_status_resource"][field][i][subfield] + else: + del request_init["deprecation_status_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1547,8 +1695,9 @@ def test_deprecate_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1629,8 +1778,9 @@ def test_deprecate_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1721,13 +1871,6 @@ def test_deprecate_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} - request_init["deprecation_status_resource"] = { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1769,8 +1912,9 @@ def test_deprecate_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1861,8 +2005,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1961,8 +2106,9 @@ def test_get_rest_required_fields(request_type=compute.GetImageRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2090,8 +2236,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2179,8 +2326,9 @@ def test_get_from_family_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2281,8 +2429,9 @@ def test_get_from_family_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2412,8 +2561,9 @@ def test_get_from_family_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Image.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2481,8 +2631,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2565,8 +2716,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2696,8 +2848,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2806,6 +2959,73 @@ def test_insert_rest(request_type): "status": "status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertImageRequest.meta.fields["image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["image_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["image_resource"][field])): + del request_init["image_resource"][field][i][subfield] + else: + del request_init["image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2839,8 +3059,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2942,8 +3163,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertImageRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3038,61 +3260,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["image_resource"] = { - "architecture": "architecture_value", - "archive_size_bytes": 1922, - "creation_timestamp": "creation_timestamp_value", - "deprecated": { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - }, - "description": "description_value", - "disk_size_gb": 1261, - "family": "family_value", - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "image_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "name": "name_value", - "raw_disk": { - "container_type": "container_type_value", - "sha1_checksum": "sha1_checksum_value", - "source": "source_value", - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_type": "source_type_value", - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
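The `*_rest_bad_request` hunks delete the whole sample body because only the path parameters matter there: the mocked transport answers with an HTTP 400 before any body would need to be valid at runtime, so pruning is unnecessary. A hedged sketch of how such a failure can be faked with `requests` and `google.api_core` (not the generator's exact output; `client`, `request`, and the `insert` call are placeholders):

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import Response
from requests.sessions import Session

def assert_rest_call_raises_bad_request(client, request):
    # Patch the underlying HTTP session and hand back a 400 response; the
    # REST transport is expected to surface it as core_exceptions.BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value._content = b"{}"  # empty JSON error payload
        req.return_value = response_value
        client.insert(request=request)  # illustrative method name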
@@ -3131,8 +3298,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3240,6 +3408,73 @@ def test_insert_unary_rest(request_type): "status": "status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertImageRequest.meta.fields["image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["image_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["image_resource"][field])): + del request_init["image_resource"][field][i][subfield] + else: + del request_init["image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3273,8 +3508,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3354,8 +3590,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertImageReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3450,61 +3687,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["image_resource"] = { - "architecture": "architecture_value", - "archive_size_bytes": 1922, - "creation_timestamp": "creation_timestamp_value", - "deprecated": { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - }, - "description": "description_value", - "disk_size_gb": 1261, - "family": "family_value", - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "image_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "name": "name_value", - "raw_disk": { - "container_type": "container_type_value", - "sha1_checksum": "sha1_checksum_value", - "source": "source_value", - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_type": "source_type_value", - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3543,8 +3725,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3612,8 +3795,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3699,8 +3883,9 @@ def test_list_rest_required_fields(request_type=compute.ListImagesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3830,8 +4015,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3993,6 +4179,73 @@ def test_patch_rest(request_type): "status": "status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchImageRequest.meta.fields["image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["image_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["image_resource"][field])): + del request_init["image_resource"][field][i][subfield] + else: + del request_init["image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
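The pruned dict is then handed straight to the proto-plus request type via `request_type(**request_init)`: proto-plus marshals plain dicts into the corresponding message fields, which is why these sample bodies can be written as nested literals. A small sketch, assuming `compute.PatchImageRequest` as in the hunk above (field values are illustrative):

from google.cloud.compute_v1.types import compute

request_init = {
    "project": "sample1",
    "image": "sample2",
    "image_resource": {"name": "name_value", "family": "family_value"},
}

# Nested dicts are converted into the corresponding protobuf messages.
request = compute.PatchImageRequest(**request_init)
assert request.project == "sample1"
assert request.image_resource.name == "name_value"

An unknown key inside one of those dicts would raise at construction time, which is exactly the failure mode the runtime pruning above is meant to avoid.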
@@ -4026,8 +4279,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4128,8 +4382,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchImageRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4220,61 +4475,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} - request_init["image_resource"] = { - "architecture": "architecture_value", - "archive_size_bytes": 1922, - "creation_timestamp": "creation_timestamp_value", - "deprecated": { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - }, - "description": "description_value", - "disk_size_gb": 1261, - "family": "family_value", - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "image_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "name": "name_value", - "raw_disk": { - "container_type": "container_type_value", - "sha1_checksum": "sha1_checksum_value", - "source": "source_value", - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_type": "source_type_value", - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4314,8 +4514,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4425,6 +4626,73 @@ def test_patch_unary_rest(request_type): "status": "status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchImageRequest.meta.fields["image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["image_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["image_resource"][field])): + del request_init["image_resource"][field][i][subfield] + else: + del request_init["image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4458,8 +4726,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4538,8 +4807,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchImageRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4630,61 +4900,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} - request_init["image_resource"] = { - "architecture": "architecture_value", - "archive_size_bytes": 1922, - "creation_timestamp": "creation_timestamp_value", - "deprecated": { - "deleted": "deleted_value", - "deprecated": "deprecated_value", - "obsolete": "obsolete_value", - "replacement": "replacement_value", - "state": "state_value", - }, - "description": "description_value", - "disk_size_gb": 1261, - "family": "family_value", - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "image_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "name": "name_value", - "raw_disk": { - "container_type": "container_type_value", - "sha1_checksum": "sha1_checksum_value", - "source": "source_value", - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_type": "source_type_value", - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4724,8 +4939,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4857,6 +5073,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyImageRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
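For repeated message fields such as the policy `bindings` above, the same pruning has to visit every element of the list, which is what the `is_repeated` flag and its index loop handle. A reduced example with hypothetical data and an assumed introspection result:

# One repeated message field whose "binding_id" subfield we pretend is
# missing from the installed dependency.
request_init = {
    "global_set_policy_request_resource": {
        "bindings": [
            {"binding_id": "binding_id_value", "role": "role_value"},
            {"binding_id": "binding_id_value", "role": "role_value"},
        ]
    }
}
runtime_nested_fields = [("bindings", "role")]  # assumed introspection result

resource = request_init["global_set_policy_request_resource"]
for field, value in list(resource.items()):
    if isinstance(value, list) and value and isinstance(value[0], dict):
        # Decide which subfields to drop from the first element, then remove
        # them from every element of the repeated field.
        to_drop = [s for s in value[0] if (field, s) not in runtime_nested_fields]
        for subfield in to_drop:
            for element in value:
                del element[subfield]

assert resource == {"bindings": [{"role": "role_value"}, {"role": "role_value"}]}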
@@ -4871,8 +5162,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4954,8 +5246,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5048,83 +5341,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5166,8 +5382,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5228,6 +5445,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsImageRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
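The recurring rename from `pb_return_value` to `return_value` does not change behavior: `<MessageClass>.pb(instance)` unwraps a proto-plus message to its underlying protobuf message, which is what `json_format.MessageToJson` expects. A minimal sketch of that conversion follows, using a hypothetical message type rather than a real `compute_v1` one.

```python
# Sketch of the conversion performed by the renamed lines in the diff:
# unwrap the proto-plus object with `<MessageClass>.pb(...)`, then serialize
# it with json_format. `ExampleOperation` is a hypothetical stand-in.
import proto
from google.protobuf import json_format


class ExampleOperation(proto.Message):
    """Hypothetical stand-in for an operation-style response message."""
    name = proto.Field(proto.STRING, number=1)
    status = proto.Field(proto.STRING, number=2)


return_value = ExampleOperation(name="op-123", status="DONE")

# Convert return value to protobuf type (same step as in the generated tests).
return_value = ExampleOperation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

# Prints the JSON representation; encoding to UTF-8 matches how the tests
# populate the fake HTTP response body.
print(json_return_value)
body = json_return_value.encode("UTF-8")
```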
@@ -5261,8 +5553,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5361,8 +5654,9 @@ def test_set_labels_rest_required_fields(request_type=compute.SetLabelsImageRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5453,10 +5747,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5498,8 +5788,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5560,6 +5851,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsImageRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5593,8 +5959,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5673,8 +6040,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5765,10 +6133,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5810,8 +6174,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5871,6 +6236,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsImageRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5883,8 +6323,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5964,8 +6405,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6060,9 +6502,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6104,8 +6543,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/tests/unit/gapic/compute_v1/test_instance_group_managers.py index cf712998..9469e1ab 100644 --- a/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -608,6 +608,88 @@ def test_abandon_instances_rest(request_type): request_init["instance_group_managers_abandon_instances_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AbandonInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_abandon_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_abandon_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
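The pruning loop that each hunk inserts removes sample-request subfields that the protobuf/proto-plus version installed at test runtime does not know about. A small pure-Python illustration of what that loop does to a sample `request_init` dict follows; the field and subfield names are hypothetical.

```python
# Illustration of the pruning loop from the generated tests. All names here
# are hypothetical; only the delete-by-(field, subfield) behavior is shown.
request_init = {
    "example_request_resource": {
        "instances": [{"name": "vm-1", "brand_new_subfield": "x"}],  # repeated message field
        "options": {"retry": True, "brand_new_option": 1},           # singular message field
    }
}

# Pretend introspection found these (field, subfield) pairs missing at runtime.
subfields_not_in_runtime = [
    {"field": "instances", "subfield": "brand_new_subfield", "is_repeated": True},
    {"field": "options", "subfield": "brand_new_option", "is_repeated": False},
]

resource = request_init["example_request_resource"]
for subfield_to_delete in subfields_not_in_runtime:
    field = subfield_to_delete["field"]
    subfield = subfield_to_delete["subfield"]
    if subfield_to_delete["is_repeated"]:
        for item in resource[field]:          # delete from every repeated element
            del item[subfield]
    else:
        del resource[field][subfield]

print(request_init)
# {'example_request_resource': {'instances': [{'name': 'vm-1'}],
#                               'options': {'retry': True}}}
```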
@@ -641,8 +723,9 @@ def test_abandon_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -749,8 +832,9 @@ def test_abandon_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -851,9 +935,6 @@ def test_abandon_instances_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_abandon_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -900,8 +981,9 @@ def test_abandon_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -966,6 +1048,88 @@ def test_abandon_instances_unary_rest(request_type): request_init["instance_group_managers_abandon_instances_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AbandonInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_abandon_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_abandon_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_abandon_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -999,8 +1163,9 @@ def test_abandon_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1085,8 +1250,9 @@ def test_abandon_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1187,9 +1353,6 @@ def test_abandon_instances_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_abandon_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1236,8 +1399,9 @@ def test_abandon_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1311,8 +1475,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1402,10 +1567,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1543,8 +1707,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1676,6 +1841,88 @@ def test_apply_updates_to_instances_rest(request_type): "minimal_action": "minimal_action_value", "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_apply_updates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_apply_updates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1709,8 +1956,9 @@ def test_apply_updates_to_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1815,8 +2063,9 @@ def test_apply_updates_to_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1919,12 +2168,6 @@ def test_apply_updates_to_instances_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_apply_updates_request_resource"] = { - "all_instances": True, - "instances": ["instances_value1", "instances_value2"], - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1971,8 +2214,9 @@ def test_apply_updates_to_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2040,6 +2284,88 @@ def test_apply_updates_to_instances_unary_rest(request_type): "minimal_action": "minimal_action_value", "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_apply_updates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_apply_updates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_apply_updates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2073,8 +2399,9 @@ def test_apply_updates_to_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2157,8 +2484,9 @@ def test_apply_updates_to_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2261,12 +2589,6 @@ def test_apply_updates_to_instances_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_apply_updates_request_resource"] = { - "all_instances": True, - "instances": ["instances_value1", "instances_value2"], - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2313,8 +2635,9 @@ def test_apply_updates_to_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2386,6 +2709,88 @@ def test_create_instances_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_create_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_create_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_create_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_create_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_create_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
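Each hunk also fakes the HTTP layer the same way: a `requests.Response` is built by hand, its `status_code` and private `_content` are set to the JSON-encoded protobuf, and the mocked session returns it. A minimal sketch of that pattern follows; the patch target and JSON payload here are illustrative, not the exact objects the generated tests patch.

```python
# Sketch of the "Wrap the value into a proper Response obj" pattern: build a
# requests.Response by hand and return it from a mocked session. The session
# being patched and the payload are illustrative.
import json
from unittest import mock

from requests import Response, Session

json_return_value = json.dumps({"name": "op-123", "status": "DONE"})

response_value = Response()
response_value.status_code = 200
# `_content` is the private attribute requests reads for .content / .json().
response_value._content = json_return_value.encode("UTF-8")

with mock.patch.object(Session, "request") as req:
    req.return_value = response_value
    session = Session()
    resp = session.request("POST", "https://example.invalid/compute/v1/op")
    assert resp.status_code == 200
    assert resp.json()["status"] == "DONE"
```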
@@ -2419,8 +2824,9 @@ def test_create_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2527,8 +2933,9 @@ def test_create_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2629,16 +3036,6 @@ def test_create_instances_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_create_instances_request_resource"] = { - "instances": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2685,8 +3082,9 @@ def test_create_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2758,6 +3156,88 @@ def test_create_instances_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_create_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_create_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_create_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_create_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_create_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2791,8 +3271,9 @@ def test_create_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2877,8 +3358,9 @@ def test_create_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2979,16 +3461,6 @@ def test_create_instances_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_create_instances_request_resource"] = { - "instances": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3035,8 +3507,9 @@ def test_create_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3131,8 +3604,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3238,8 +3712,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3381,8 +3856,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3474,8 +3950,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3559,8 +4036,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3702,8 +4180,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3766,6 +4245,88 @@ def test_delete_instances_rest(request_type): "instances": ["instances_value1", "instances_value2"], "skip_instances_on_validation_error": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_delete_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_delete_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3799,8 +4360,9 @@ def test_delete_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3907,8 +4469,9 @@ def test_delete_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4009,10 +4572,6 @@ def test_delete_instances_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_delete_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"], - "skip_instances_on_validation_error": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4059,8 +4618,9 @@ def test_delete_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4126,6 +4686,88 @@ def test_delete_instances_unary_rest(request_type): "instances": ["instances_value1", "instances_value2"], "skip_instances_on_validation_error": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_delete_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_delete_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_delete_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
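
The get_message_fields helper repeated above tells proto-plus wrappers apart from plain protobuf classes by checking for a DESCRIPTOR attribute, which only protoc-generated (*_pb2) classes expose. A standalone sketch of that check, assuming field is a proto-plus field descriptor whose message attribute (when set) is the nested message class:

def nested_message_fields(field):
    # Return the nested field descriptors of a composite (message) field,
    # or an empty list when the field is not a message at all.
    if not (hasattr(field, "message") and field.message):
        return []
    if hasattr(field.message, "DESCRIPTOR"):
        # Plain protobuf class (e.g. from a *_pb2 module).
        return list(field.message.DESCRIPTOR.fields)
    # proto-plus wrapper: field metadata lives on the class's `meta` attribute.
    return list(field.message.meta.fields.values())
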
@@ -4159,8 +4801,9 @@ def test_delete_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4245,8 +4888,9 @@ def test_delete_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4347,10 +4991,6 @@ def test_delete_instances_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_delete_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"], - "skip_instances_on_validation_error": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4397,8 +5037,9 @@ def test_delete_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4463,6 +5104,90 @@ def test_delete_per_instance_configs_rest(request_type): request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = { "names": ["names_value1", "names_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
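
With that helper, the runtime_nested_fields comprehension above enumerates every (field, subfield) pair the installed library actually defines for the request body. A sketch using the same metadata lookup; the import path is an assumption based on the test module, and get_message_fields is the helper defined in the test above.

from google.cloud.compute_v1.types import compute  # assumed import path

body_field = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.meta.fields[
    "instance_group_managers_delete_per_instance_configs_req_resource"
]
runtime_nested_fields = {
    (field.name, nested.name)
    for field in get_message_fields(body_field)  # helper defined in the test above
    for nested in get_message_fields(field)
}
# Any (field, subfield) key in the sample request that is missing from this set
# was added to the protos after the installed library version was generated.
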
@@ -4496,8 +5221,9 @@ def test_delete_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4602,8 +5328,9 @@ def test_delete_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4706,9 +5433,6 @@ def test_delete_per_instance_configs_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = { - "names": ["names_value1", "names_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4755,8 +5479,9 @@ def test_delete_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4821,6 +5546,90 @@ def test_delete_per_instance_configs_unary_rest(request_type): request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = { "names": ["names_value1", "names_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4854,8 +5663,9 @@ def test_delete_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4938,8 +5748,9 @@ def test_delete_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5042,9 +5853,6 @@ def test_delete_per_instance_configs_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = { - "names": ["names_value1", "names_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5091,8 +5899,9 @@ def test_delete_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5182,8 +5991,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5281,8 +6091,9 @@ def test_get_rest_required_fields(request_type=compute.GetInstanceGroupManagerRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5426,8 +6237,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5551,6 +6363,81 @@ def test_insert_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
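
In the serialization hunks above the behavior is unchanged: the rename only folds the intermediate pb_return_value name into return_value and adds a clarifying comment. The proto-plus return value is still converted to its underlying protobuf message before json_format.MessageToJson serializes it, since MessageToJson expects a protobuf message. A minimal standalone sketch of that mocking pattern; import paths are assumptions based on the test module.

from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute  # assumed import path

return_value = compute.Operation(name="name_value")

response_value = Response()
response_value.status_code = 200
# proto-plus -> protobuf -> JSON, exactly as in the tests above.
pb_message = compute.Operation.pb(return_value)
response_value._content = json_format.MessageToJson(pb_message).encode("UTF-8")
# response_value can now be returned from a mocked Session.request call.
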
@@ -5584,8 +6471,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5688,8 +6576,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5784,75 +6673,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5898,8 +6718,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6029,6 +6850,81 @@ def test_insert_unary_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
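
As an aside, proto-plus message classes also expose a to_json classmethod that performs a comparable conversion in one step; the generated tests keep the explicit two-step form, and the serialization defaults (for example enum rendering) are not guaranteed to match, so treat the sketch below as an assumption rather than a drop-in replacement.

from google.cloud.compute_v1.types import compute  # assumed import path

op = compute.Operation(name="name_value")
json_payload = compute.Operation.to_json(op)
# Assumed to be roughly equivalent to
# json_format.MessageToJson(compute.Operation.pb(op)), modulo default options.
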
@@ -6062,8 +6958,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6144,8 +7041,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6240,75 +7138,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
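
The *_rest_bad_request tests above no longer seed the request body at all; they keep only the routing fields, since the faked BadRequest error is what the assertion cares about and an out-of-date body dict would otherwise need the same pruning boilerplate. Building the request from routing fields alone looks like this (values taken from the sample request above, import path assumed):

from google.cloud.compute_v1.types import compute  # assumed import path

request_init = {"project": "sample1", "zone": "sample2"}
request = compute.InsertInstanceGroupManagerRequest(**request_init)
# instance_group_manager_resource is simply left at its default (empty) value.
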
@@ -6354,8 +7183,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6431,8 +7261,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6524,8 +7355,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6667,8 +7499,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6793,10 +7626,9 @@ def test_list_errors_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListErrorsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6889,10 +7721,11 @@ def test_list_errors_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListErrorsResponse.pb( + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListErrorsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7046,10 +7879,9 @@ def test_list_errors_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.InstanceGroupManagersListErrorsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7181,10 +8013,11 @@ def test_list_managed_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb( + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7277,12 +8110,11 @@ def test_list_managed_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( - compute.InstanceGroupManagersListManagedInstancesResponse.pb( - return_value - ) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb( + return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7437,10 +8269,11 @@ def test_list_managed_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb( + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7573,10 +8406,11 @@ def test_list_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb( + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7669,10 +8503,11 @@ def test_list_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( - compute.InstanceGroupManagersListPerInstanceConfigsResp.pb(return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb( + return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7828,10 +8663,11 @@ def test_list_per_instance_configs_rest_flattened(): # 
Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb( + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8021,6 +8857,81 @@ def test_patch_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
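
The same two-line conversion recurs for every response type in this file (Operation, InstanceGroupManagerList, the ListErrors/ListManagedInstances/ListPerInstanceConfigs responses, and so on). A hypothetical helper, not part of the generated tests, that captures the pattern once:

from google.protobuf import json_format
from requests import Response


def fake_rest_response(response_cls, return_value, status_code=200):
    # response_cls is a proto-plus message class; return_value is an instance of it.
    response_value = Response()
    response_value.status_code = status_code
    pb_message = response_cls.pb(return_value)  # proto-plus -> protobuf
    response_value._content = json_format.MessageToJson(pb_message).encode("UTF-8")
    return response_value
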
@@ -8054,8 +8965,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8162,8 +9074,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8263,75 +9176,6 @@ def test_patch_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8382,8 +9226,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8518,6 +9363,81 @@ def test_patch_unary_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
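
When the stale subfield lives inside a repeated message field (for example versions or named_ports in the resource above), the deletion loop walks every element of the list rather than only the first one it inspected. A standalone illustration with a hypothetical retired subfield:

resource = {
    "versions": [
        {"name": "name_value", "retired_subfield": 1},
        {"name": "name_value_2", "retired_subfield": 2},
    ]
}
field, subfield = "versions", "retired_subfield"
for i in range(0, len(resource[field])):
    del resource[field][i][subfield]
assert resource == {"versions": [{"name": "name_value"}, {"name": "name_value_2"}]}
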
@@ -8551,8 +9471,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8637,8 +9558,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8738,75 +9660,6 @@ def test_patch_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8857,8 +9710,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8934,6 +9788,88 @@ def test_patch_per_instance_configs_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + 
"instance_group_managers_patch_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8967,8 +9903,9 @@ def test_patch_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9075,8 +10012,9 @@ def test_patch_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9179,16 +10117,6 @@ def test_patch_per_instance_configs_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9237,8 +10165,9 @@ def test_patch_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9312,6 +10241,88 @@ def test_patch_per_instance_configs_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
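Note on the block of setup code added above (and repeated for every REST test in this file): it guards against version skew between the protobuf/proto-plus dependency used when these tests were generated and the one installed at test time (gapic-generator-python issue 1748). Any subfield in the hard-coded sample request that the runtime message type does not define is deleted before the request object is constructed. Below is a simplified, dependency-free sketch of that pruning step; the dicts and the runtime_nested_fields set are illustrative stand-ins, not taken from the generated tests.

def prune_unknown_subfields(resource, runtime_nested_fields):
    """Drop (field, subfield) pairs from a sample request dict when the
    subfield is not known to the runtime version of the dependency."""
    for field, value in list(resource.items()):
        # Repeated message fields are lists of dicts; singular ones are dicts.
        items = value if isinstance(value, list) else [value]
        for item in items:
            if not isinstance(item, dict):
                continue
            for subfield in list(item.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del item[subfield]
    return resource

# Illustrative sample mirroring the per-instance-configs payload above.
sample = {
    "per_instance_configs": [
        {"fingerprint": "fingerprint_value", "name": "name_value", "status": "status_value"}
    ]
}
# Pretend the runtime type only knows "name" and "status".
runtime_nested_fields = {("per_instance_configs", "name"), ("per_instance_configs", "status")}
print(prune_unknown_subfields(sample, runtime_nested_fields))
# -> {'per_instance_configs': [{'name': 'name_value', 'status': 'status_value'}]}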
@@ -9345,8 +10356,9 @@ def test_patch_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9431,8 +10443,9 @@ def test_patch_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9535,16 +10548,6 @@ def test_patch_per_instance_configs_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9593,8 +10596,9 @@ def test_patch_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9661,6 +10665,88 @@ def test_recreate_instances_rest(request_type): request_init["instance_group_managers_recreate_instances_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RecreateInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_recreate_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_recreate_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
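The other change repeated throughout these hunks is a rename only: the intermediate pb_return_value binding is dropped and return_value is rebound after conversion. The serialization path is unchanged, i.e. the proto-plus return value is converted to its underlying protobuf message via the type's .pb() classmethod and then rendered with json_format.MessageToJson to fill the mocked REST response body. A minimal sketch of that conversion, assuming google-cloud-compute is installed and using the import path these generated tests rely on:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Build a proto-plus response, convert it to the raw protobuf message,
# then serialize it the way the mocked REST response body is built.
return_value = compute.Operation(name="name_value")
json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
content = json_return_value.encode("UTF-8")  # assigned to response_value._content in the tests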
@@ -9694,8 +10780,9 @@ def test_recreate_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9802,8 +10889,9 @@ def test_recreate_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9904,9 +10992,6 @@ def test_recreate_instances_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_recreate_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9953,8 +11038,9 @@ def test_recreate_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10019,6 +11105,88 @@ def test_recreate_instances_unary_rest(request_type): request_init["instance_group_managers_recreate_instances_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RecreateInstancesInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_recreate_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_recreate_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_recreate_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -10052,8 +11220,9 @@ def test_recreate_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10138,8 +11307,9 @@ def test_recreate_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10240,9 +11410,6 @@ def test_recreate_instances_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_recreate_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10289,8 +11456,9 @@ def test_recreate_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10385,8 +11553,9 @@ def test_resize_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10504,8 +11673,9 @@ def test_resize_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10659,8 +11829,9 @@ def test_resize_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -10753,8 +11924,9 @@ def test_resize_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10850,8 +12022,9 @@ def test_resize_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11005,8 +12178,9 @@ def test_resize_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11069,6 +12243,88 @@ def test_set_instance_template_rest(request_type): request_init["instance_group_managers_set_instance_template_request_resource"] = { "instance_template": "instance_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetInstanceTemplateInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_set_instance_template_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_set_instance_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -11102,8 +12358,9 @@ def test_set_instance_template_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11210,8 +12467,9 @@ def test_set_instance_template_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11312,9 +12570,6 @@ def test_set_instance_template_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_set_instance_template_request_resource"] = { - "instance_template": "instance_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11361,8 +12616,9 @@ def test_set_instance_template_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11427,6 +12683,88 @@ def test_set_instance_template_unary_rest(request_type): request_init["instance_group_managers_set_instance_template_request_resource"] = { "instance_template": "instance_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetInstanceTemplateInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_set_instance_template_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_set_instance_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_set_instance_template_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -11460,8 +12798,9 @@ def test_set_instance_template_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11546,8 +12885,9 @@ def test_set_instance_template_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11648,9 +12988,6 @@ def test_set_instance_template_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_set_instance_template_request_resource"] = { - "instance_template": "instance_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11697,8 +13034,9 @@ def test_set_instance_template_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11764,6 +13102,88 @@ def test_set_target_pools_rest(request_type): "fingerprint": "fingerprint_value", "target_pools": ["target_pools_value1", "target_pools_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetPoolsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_set_target_pools_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_set_target_pools_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -11797,8 +13217,9 @@ def test_set_target_pools_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11905,8 +13326,9 @@ def test_set_target_pools_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12007,10 +13429,6 @@ def test_set_target_pools_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_set_target_pools_request_resource"] = { - "fingerprint": "fingerprint_value", - "target_pools": ["target_pools_value1", "target_pools_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12057,8 +13475,9 @@ def test_set_target_pools_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12124,6 +13543,88 @@ def test_set_target_pools_unary_rest(request_type): "fingerprint": "fingerprint_value", "target_pools": ["target_pools_value1", "target_pools_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetPoolsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_set_target_pools_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_set_target_pools_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_set_target_pools_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -12157,8 +13658,9 @@ def test_set_target_pools_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12243,8 +13745,9 @@ def test_set_target_pools_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12345,10 +13848,6 @@ def test_set_target_pools_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_set_target_pools_request_resource"] = { - "fingerprint": "fingerprint_value", - "target_pools": ["target_pools_value1", "target_pools_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12395,8 +13894,9 @@ def test_set_target_pools_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12468,6 +13968,90 @@ def test_update_per_instance_configs_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_update_per_instance_configs_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -12501,8 +14085,9 @@ def test_update_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12609,8 +14194,9 @@ def test_update_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12713,16 +14299,6 @@ def test_update_per_instance_configs_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_update_per_instance_configs_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12771,8 +14347,9 @@ def test_update_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12846,6 +14423,90 @@ def test_update_per_instance_configs_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.meta.fields[ + "instance_group_managers_update_per_instance_configs_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field] + ), + ): + del request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field][i][subfield] + else: + del request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -12879,8 +14540,9 @@ def test_update_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12965,8 +14627,9 @@ def test_update_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13069,16 +14732,6 @@ def test_update_per_instance_configs_unary_rest_bad_request( "zone": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_managers_update_per_instance_configs_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13127,8 +14780,9 @@ def test_update_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_instance_groups.py b/tests/unit/gapic/compute_v1/test_instance_groups.py index d4811f98..e4c57930 100644 --- a/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -580,6 +580,88 @@ def test_add_instances_rest(request_type): request_init["instance_groups_add_instances_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddInstancesInstanceGroupRequest.meta.fields[ + "instance_groups_add_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_add_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instance_groups_add_instances_request_resource"][ + field + ] + ), + ): + del request_init["instance_groups_add_instances_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_groups_add_instances_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -613,8 +695,9 @@ def test_add_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -721,8 +804,9 @@ def test_add_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -822,9 +906,6 @@ def test_add_instances_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_add_instances_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -871,8 +952,9 @@ def test_add_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -937,6 +1019,88 @@ def test_add_instances_unary_rest(request_type): request_init["instance_groups_add_instances_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddInstancesInstanceGroupRequest.meta.fields[ + "instance_groups_add_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_add_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instance_groups_add_instances_request_resource"][ + field + ] + ), + ): + del request_init["instance_groups_add_instances_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_groups_add_instances_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
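The get_message_fields helper above has to cope with both proto-plus and plain *_pb2 message types: it tells them apart via the DESCRIPTOR attribute, which only protobuf classes expose, and then reads field metadata from .meta.fields or .DESCRIPTOR.fields respectively. A small illustration against the request type used in the surrounding test; it assumes google-cloud-compute is installed, and the commented values reflect the proto-plus case exercised here.

from google.cloud.compute_v1.types import compute

field = compute.AddInstancesInstanceGroupRequest.meta.fields[
    "instance_groups_add_instances_request_resource"
]
# The resource field is a proto-plus message type: no DESCRIPTOR attribute,
# and its field metadata lives on .meta.fields.
print(hasattr(field.message, "DESCRIPTOR"))      # expected: False
print("instances" in field.message.meta.fields)  # expected: True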
@@ -970,8 +1134,9 @@ def test_add_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1056,8 +1221,9 @@ def test_add_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1157,9 +1323,6 @@ def test_add_instances_unary_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_add_instances_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1206,8 +1369,9 @@ def test_add_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1281,8 +1445,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1372,8 +1537,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1510,8 +1676,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1670,8 +1837,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1775,8 +1943,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteInstanceGroupReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1918,8 +2087,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2011,8 +2181,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2096,8 +2267,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2239,8 +2411,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2322,8 +2495,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # 
Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2415,8 +2589,9 @@ def test_get_rest_required_fields(request_type=compute.GetInstanceGroupRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2560,8 +2735,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2631,6 +2807,77 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupRequest.meta.fields[ + "instance_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_group_resource"][field])): + del request_init["instance_group_resource"][field][i][subfield] + else: + del request_init["instance_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
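The other change repeated in every response mock above is purely a rename: the intermediate pb_return_value variable goes away and return_value is rebound to the underlying protobuf message, with a comment spelling out why the conversion happens. The mechanics are unchanged; roughly, assuming google-cloud-compute and protobuf are installed:

from google.cloud import compute_v1
from google.protobuf import json_format

# return_value starts life as a proto-plus wrapper around the Operation message.
return_value = compute_v1.Operation(name="operation-1")

# Convert return value to protobuf type: .pb() unwraps the proto-plus object
# into the raw protobuf message that json_format.MessageToJson() can serialize.
return_value = compute_v1.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
print(json_return_value)  # JSON text containing "name": "operation-1"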
@@ -2664,8 +2911,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2766,8 +3014,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertInstanceGroupReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2862,21 +3111,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_group_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2918,8 +3152,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2991,6 +3226,77 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupRequest.meta.fields[ + "instance_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_group_resource"][field])): + del request_init["instance_group_resource"][field][i][subfield] + else: + del request_init["instance_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
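Whatever survives the pruning is then passed to request_type(**request_init) unchanged; proto-plus request classes coerce nested dicts and lists into the corresponding message types. A sketch (not part of the diff) using the field names from the sample above:

from google.cloud import compute_v1

# Nested dicts/lists are coerced into InstanceGroup / NamedPort messages.
request = compute_v1.InsertInstanceGroupRequest(
    project="sample1",
    zone="sample2",
    instance_group_resource={
        "name": "name_value",
        "named_ports": [{"name": "name_value", "port": 453}],
    },
)
assert request.instance_group_resource.named_ports[0].port == 453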
@@ -3024,8 +3330,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3106,8 +3413,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3202,21 +3510,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_group_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3258,8 +3551,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3331,8 +3625,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3422,8 +3717,9 @@ def test_list_rest_required_fields(request_type=compute.ListInstanceGroupsReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3565,8 +3861,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3682,6 +3979,88 @@ def test_list_instances_rest(request_type): request_init["instance_groups_list_instances_request_resource"] = { "instance_state": "instance_state_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ListInstancesInstanceGroupsRequest.meta.fields[ + "instance_groups_list_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_list_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instance_groups_list_instances_request_resource"][ + field + ] + ), + ): + del request_init["instance_groups_list_instances_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_groups_list_instances_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3697,8 +4076,9 @@ def test_list_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3795,8 +4175,9 @@ def test_list_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3906,9 +4287,6 @@ def test_list_instances_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_list_instances_request_resource"] = { - "instance_state": "instance_state_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3955,8 +4333,9 @@ def test_list_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4087,6 +4466,88 @@ def test_remove_instances_rest(request_type): request_init["instance_groups_remove_instances_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveInstancesInstanceGroupRequest.meta.fields[ + "instance_groups_remove_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_remove_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_groups_remove_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_groups_remove_instances_request_resource" + ][field][i][subfield] + else: + del request_init["instance_groups_remove_instances_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
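For context, the success-path tests above all fake the transport the way the "Wrap the value into a proper Response obj" comments describe: a bare requests.Response gets a 200 status code and the serialized JSON written into _content, and the mocked session returns it. A stripped-down sketch of just that wrapping:

import json
from requests import Response

# Build a fake REST response the way the generated tests do.
response_value = Response()
response_value.status_code = 200
response_value._content = json.dumps({"name": "operation-1"}).encode("UTF-8")

# The client-side code later reads the body back from .content.
assert response_value.status_code == 200
assert json.loads(response_value.content) == {"name": "operation-1"}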
@@ -4120,8 +4581,9 @@ def test_remove_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4228,8 +4690,9 @@ def test_remove_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4329,9 +4792,6 @@ def test_remove_instances_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_remove_instances_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4378,8 +4838,9 @@ def test_remove_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4444,6 +4905,88 @@ def test_remove_instances_unary_rest(request_type): request_init["instance_groups_remove_instances_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveInstancesInstanceGroupRequest.meta.fields[ + "instance_groups_remove_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_remove_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_groups_remove_instances_request_resource" + ][field] + ), + ): + del request_init[ + "instance_groups_remove_instances_request_resource" + ][field][i][subfield] + else: + del request_init["instance_groups_remove_instances_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4477,8 +5020,9 @@ def test_remove_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4563,8 +5107,9 @@ def test_remove_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4664,9 +5209,6 @@ def test_remove_instances_unary_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_remove_instances_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4713,8 +5255,9 @@ def test_remove_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4780,6 +5323,88 @@ def test_set_named_ports_rest(request_type): "fingerprint": "fingerprint_value", "named_ports": [{"name": "name_value", "port": 453}], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNamedPortsInstanceGroupRequest.meta.fields[ + "instance_groups_set_named_ports_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_set_named_ports_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_groups_set_named_ports_request_resource" + ][field] + ), + ): + del request_init[ + "instance_groups_set_named_ports_request_resource" + ][field][i][subfield] + else: + del request_init["instance_groups_set_named_ports_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4813,8 +5438,9 @@ def test_set_named_ports_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4921,8 +5547,9 @@ def test_set_named_ports_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5022,10 +5649,6 @@ def test_set_named_ports_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_set_named_ports_request_resource"] = { - "fingerprint": "fingerprint_value", - "named_ports": [{"name": "name_value", "port": 453}], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5072,8 +5695,9 @@ def test_set_named_ports_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5139,6 +5763,88 @@ def test_set_named_ports_unary_rest(request_type): "fingerprint": "fingerprint_value", "named_ports": [{"name": "name_value", "port": 453}], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNamedPortsInstanceGroupRequest.meta.fields[ + "instance_groups_set_named_ports_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_groups_set_named_ports_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instance_groups_set_named_ports_request_resource" + ][field] + ), + ): + del request_init[ + "instance_groups_set_named_ports_request_resource" + ][field][i][subfield] + else: + del request_init["instance_groups_set_named_ports_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5172,8 +5878,9 @@ def test_set_named_ports_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5258,8 +5965,9 @@ def test_set_named_ports_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5359,10 +6067,6 @@ def test_set_named_ports_unary_rest_bad_request( "zone": "sample2", "instance_group": "sample3", } - request_init["instance_groups_set_named_ports_request_resource"] = { - "fingerprint": "fingerprint_value", - "named_ports": [{"name": "name_value", "port": 453}], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5409,8 +6113,9 @@ def test_set_named_ports_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_instance_templates.py b/tests/unit/gapic/compute_v1/test_instance_templates.py index e36fcb89..42e1a442 100644 --- a/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -608,8 +608,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -698,8 +699,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -836,8 +838,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -992,8 +995,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1095,8 +1099,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1228,8 +1233,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1316,8 +1322,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1397,8 +1404,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,8 +1538,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value 
= response_value @@ -1604,8 +1613,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1689,8 +1699,9 @@ def test_get_rest_required_fields(request_type=compute.GetInstanceTemplateReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1824,8 +1835,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1893,8 +1905,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1977,8 +1990,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2110,8 +2124,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2350,6 +2365,79 @@ def test_insert_rest(request_type): ] }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceTemplateRequest.meta.fields[ + "instance_template_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_template_resource"][field]) + ): + del request_init["instance_template_resource"][field][i][subfield] + else: + del request_init["instance_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2383,8 +2471,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2483,8 +2572,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2578,191 +2668,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["instance_template_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": 
"machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "region": "region_value", - "self_link": "self_link_value", - "source_instance": "source_instance_value", - "source_instance_params": { - "disk_configs": [ - { - "auto_delete": True, - "custom_image": "custom_image_value", - "device_name": "device_name_value", - "instantiate_from": "instantiate_from_value", - } - ] - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2803,8 +2708,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3045,6 +2951,79 @@ def test_insert_unary_rest(request_type): ] }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceTemplateRequest.meta.fields[ + "instance_template_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_template_resource"][field]) + ): + del request_init["instance_template_resource"][field][i][subfield] + else: + del request_init["instance_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
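The `# Convert return value to protobuf type` hunks that recur throughout these files all make the same change: the proto-plus return value is converted to its underlying protobuf message with `<Type>.pb(...)` before `json_format.MessageToJson` serializes it, and the intermediate `pb_return_value` name is dropped by reusing `return_value`. A minimal sketch of that response-mocking pattern, assuming google-cloud-compute and requests are available (the Operation name is a placeholder):

from google.cloud import compute_v1
from google.protobuf import json_format
from requests import Response

# Build the proto-plus value the mocked server should "send back".
return_value = compute_v1.Operation(name="operation-123")

# Convert return value to protobuf type, then serialize it to JSON.
return_value = compute_v1.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

# Wrap the payload in a requests.Response the way the tests above do.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")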
@@ -3078,8 +3057,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3156,8 +3136,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3251,191 +3232,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["instance_template_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - 
"machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "region": "region_value", - "self_link": "self_link_value", - "source_instance": "source_instance_value", - "source_instance_params": { - "disk_configs": [ - { - "auto_delete": True, - "custom_image": "custom_image_value", - "device_name": "device_name_value", - "instantiate_from": "instantiate_from_value", - } - ] - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3476,8 +3272,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3548,8 +3345,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3635,8 +3433,9 @@ def test_list_rest_required_fields(request_type=compute.ListInstanceTemplatesReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3772,8 +3571,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3958,6 +3758,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyInstanceTemplateRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
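The get_message_fields helper repeated in each of these blocks decides whether a field's message type is a proto-plus wrapper or a raw protobuf (pb2) class by probing for a DESCRIPTOR attribute, then reads the field list from meta.fields or DESCRIPTOR.fields accordingly. A condensed sketch of the same check, written against a message class rather than a field object and relying on the same assumption the generated code makes (proto-plus wrappers do not expose DESCRIPTOR directly):

def field_names(message_cls):
    # Raw protobuf classes expose their fields through DESCRIPTOR.fields;
    # proto-plus wrappers expose theirs through meta.fields.
    if hasattr(message_cls, "DESCRIPTOR"):
        return [f.name for f in message_cls.DESCRIPTOR.fields]
    return list(message_cls.meta.fields.keys())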
@@ -3972,8 +3847,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4055,8 +3931,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4151,83 +4028,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4269,8 +4069,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4330,6 +4131,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsInstanceTemplateRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4342,8 +4218,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4423,8 +4300,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4522,9 +4400,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4566,8 +4441,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_instances.py b/tests/unit/gapic/compute_v1/test_instances.py index 267b76c3..88ed7ed1 100644 --- a/tests/unit/gapic/compute_v1/test_instances.py +++ b/tests/unit/gapic/compute_v1/test_instances.py @@ -571,6 +571,77 @@ def test_add_access_config_rest(request_type): "set_public_ptr": True, "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAccessConfigInstanceRequest.meta.fields[ + "access_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "access_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_config_resource"][field])): + del request_init["access_config_resource"][field][i][subfield] + else: + del request_init["access_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -604,8 +675,9 @@ def test_add_access_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -724,8 +796,9 @@ def test_add_access_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -830,17 +903,6 @@ def test_add_access_config_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["access_config_resource"] = { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -888,8 +950,9 @@ def test_add_access_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -959,6 +1022,77 @@ def test_add_access_config_unary_rest(request_type): "set_public_ptr": True, "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAccessConfigInstanceRequest.meta.fields[ + "access_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "access_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_config_resource"][field])): + del request_init["access_config_resource"][field][i][subfield] + else: + del request_init["access_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
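The access_config_resource dicts above use the key type_ (with value type__value) rather than type because the code generator appends an underscore to proto field names that would collide with Python keywords or builtins; the placeholder values simply mirror the mangled names. Assuming google-cloud-compute is installed, the mangled attribute is what gets set and read on the message (a sketch, reusing the tests' placeholder value):

from google.cloud import compute_v1

# The proto field is `type`; the generated Python attribute is `type_`.
access_config = compute_v1.AccessConfig(name="name_value", type_="type__value")
assert access_config.type_ == "type__value"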
@@ -992,8 +1126,9 @@ def test_add_access_config_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1090,8 +1225,9 @@ def test_add_access_config_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1196,17 +1332,6 @@ def test_add_access_config_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["access_config_resource"] = { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1254,8 +1379,9 @@ def test_add_access_config_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1317,6 +1443,88 @@ def test_add_resource_policies_rest(request_type): request_init["instances_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesInstanceRequest.meta.fields[ + "instances_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_add_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "instances_add_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["instances_add_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1350,8 +1558,9 @@ def test_add_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1458,8 +1667,9 @@ def test_add_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1553,9 +1763,6 @@ def test_add_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1602,8 +1809,9 @@ def test_add_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1664,6 +1872,88 @@ def test_add_resource_policies_unary_rest(request_type): request_init["instances_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesInstanceRequest.meta.fields[ + "instances_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_add_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "instances_add_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["instances_add_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1697,8 +1987,9 @@ def test_add_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1783,8 +2074,9 @@ def test_add_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1878,9 +2170,6 @@ def test_add_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1927,8 +2216,9 @@ def test_add_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2002,8 +2292,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2093,8 +2384,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2229,8 +2521,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2399,6 +2692,75 @@ def test_attach_disk_rest(request_type): "source": "source_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AttachDiskInstanceRequest.meta.fields["attached_disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "attached_disk_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attached_disk_resource"][field])): + del request_init["attached_disk_resource"][field][i][subfield] + else: + del request_init["attached_disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2432,8 +2794,9 @@ def test_attach_disk_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2545,8 +2908,9 @@ def test_attach_disk_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2645,58 +3009,6 @@ def test_attach_disk_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["attached_disk_resource"] = { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
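Throughout these tests, request = request_type(**request_init) is handed plain dicts (and lists of dicts) for message-typed fields such as attached_disk_resource; proto-plus coerces them into the corresponding message objects at construction time, which is what lets the sample requests be written as nested literals. A small sketch, assuming google-cloud-compute is installed and using only field names that appear in the samples above:

from google.cloud import compute_v1

request_init = {
    "project": "sample1",
    "zone": "sample2",
    "instance": "sample3",
    # A plain dict is accepted for the message-typed field and coerced by proto-plus.
    "attached_disk_resource": {"device_name": "device_name_value", "boot": True},
}
request = compute_v1.AttachDiskInstanceRequest(**request_init)

assert isinstance(request.attached_disk_resource, compute_v1.AttachedDisk)
assert request.attached_disk_resource.device_name == "device_name_value"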
@@ -2743,8 +3055,9 @@ def test_attach_disk_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2854,6 +3167,75 @@ def test_attach_disk_unary_rest(request_type): "source": "source_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AttachDiskInstanceRequest.meta.fields["attached_disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "attached_disk_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attached_disk_resource"][field])): + del request_init["attached_disk_resource"][field][i][subfield] + else: + del request_init["attached_disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
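For reference, the pruning loop that both `test_attach_disk_rest` variants gain reduces to the following behaviour on a toy payload. The field names mirror the sample request above except `retired_option`, which is invented to stand for a subfield the runtime proto no longer declares; the real test derives `runtime_nested_fields` from the proto metadata as sketched earlier:

request_init = {
    "attached_disk_resource": {
        "initialize_params": {"disk_name": "disk-1", "retired_option": True},
        "device_name": "dev-0",
    }
}
runtime_nested_fields = [("initialize_params", "disk_name")]

# Message-typed (dict) branch: drop any (field, subfield) pair the runtime
# version of the proto does not declare.
for field, value in list(request_init["attached_disk_resource"].items()):
    if isinstance(value, dict):
        for subfield in list(value):
            if (field, subfield) not in runtime_nested_fields:
                del value[subfield]

assert request_init["attached_disk_resource"] == {
    "initialize_params": {"disk_name": "disk-1"},
    "device_name": "dev-0",
}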
@@ -2887,8 +3269,9 @@ def test_attach_disk_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2978,8 +3361,9 @@ def test_attach_disk_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3078,58 +3462,6 @@ def test_attach_disk_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["attached_disk_resource"] = { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [{"content": "content_value", "file_type": "file_type_value"}], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
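The `*_rest_bad_request` hunks above drop the hand-written `attached_disk_resource` body entirely: once the mocked transport answers with a 400, the api_core layer raises from the HTTP status alone, so no request body is needed for the assertion. A tiny sketch of that mapping, using only the public `from_http_status` helper rather than the generated test harness:

from google.api_core import exceptions as core_exceptions

# HTTP 400 maps straight to BadRequest, independent of any request payload.
exc = core_exceptions.from_http_status(400, "bad request")
assert isinstance(exc, core_exceptions.BadRequest)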
@@ -3176,8 +3508,9 @@ def test_attach_disk_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3408,6 +3741,84 @@ def test_bulk_insert_rest(request_type): "per_instance_properties": {}, "source_instance_template": "source_instance_template_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertInstanceRequest.meta.fields[ + "bulk_insert_instance_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_instance_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["bulk_insert_instance_resource_resource"][field]), + ): + del request_init["bulk_insert_instance_resource_resource"][field][ + i + ][subfield] + else: + del request_init["bulk_insert_instance_resource_resource"][field][ + subfield + ] request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. @@ -3441,8 +3852,9 @@ def test_bulk_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3545,8 +3957,9 @@ def test_bulk_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3639,179 +4052,6 @@ def test_bulk_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["bulk_insert_instance_resource_resource"] = { - "count": 553, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": "machine_type_value", - 
"metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "location_policy": {"locations": {}, "target_shape": "target_shape_value"}, - "min_count": 972, - "name_pattern": "name_pattern_value", - "per_instance_properties": {}, - "source_instance_template": "source_instance_template_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3853,8 +4093,9 @@ def test_bulk_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4084,6 +4325,84 @@ def test_bulk_insert_unary_rest(request_type): "per_instance_properties": {}, "source_instance_template": "source_instance_template_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertInstanceRequest.meta.fields[ + "bulk_insert_instance_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_instance_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["bulk_insert_instance_resource_resource"][field]), + ): + del request_init["bulk_insert_instance_resource_resource"][field][ + i + ][subfield] + else: + del request_init["bulk_insert_instance_resource_resource"][field][ + subfield + ] request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. @@ -4117,8 +4436,9 @@ def test_bulk_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4199,8 +4519,9 @@ def test_bulk_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4293,179 +4614,6 @@ def test_bulk_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["bulk_insert_instance_resource_resource"] = { - "count": 553, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": 
"machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "location_policy": {"locations": {}, "target_shape": "target_shape_value"}, - "min_count": 972, - "name_pattern": "name_pattern_value", - "per_instance_properties": {}, - "source_instance_template": "source_instance_template_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4507,8 +4655,9 @@ def test_bulk_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4598,8 +4747,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4703,8 +4853,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteInstanceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4838,8 +4989,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4927,8 +5079,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5010,8 +5163,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5145,8 +5299,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5234,8 +5389,9 @@ def test_delete_access_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5361,8 +5517,9 @@ def test_delete_access_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5517,8 +5674,9 @@ def test_delete_access_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5608,8 +5766,9 @@ def test_delete_access_config_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5713,8 +5872,9 @@ def test_delete_access_config_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5869,8 +6029,9 @@ def test_delete_access_config_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5960,8 +6121,9 @@ def test_detach_disk_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6079,8 +6241,9 @@ def test_detach_disk_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6228,8 +6391,9 @@ def test_detach_disk_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6318,8 +6482,9 @@ def test_detach_disk_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6415,8 +6580,9 @@ def test_detach_disk_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6564,8 +6730,9 @@ def test_detach_disk_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6658,8 +6825,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-6765,8 +6933,9 @@ def test_get_rest_required_fields(request_type=compute.GetInstanceRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6900,8 +7069,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6966,10 +7136,9 @@ def test_get_effective_firewalls_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstancesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7060,10 +7229,11 @@ def test_get_effective_firewalls_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb( + # Convert return value to protobuf type + return_value = compute.InstancesGetEffectiveFirewallsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7210,10 +7380,9 @@ def test_get_effective_firewalls_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstancesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7285,8 +7454,9 @@ def test_get_guest_attributes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.GuestAttributes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7380,8 +7550,9 @@ def test_get_guest_attributes_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.GuestAttributes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7524,8 +7695,9 @@ def test_get_guest_attributes_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.GuestAttributes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7594,8 +7766,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7682,8 +7855,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7819,8 +7993,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7888,8 +8063,9 @@ def test_get_screenshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Screenshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7973,8 +8149,9 @@ def test_get_screenshot_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Screenshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8110,8 +8287,9 @@ def 
test_get_screenshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Screenshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8182,8 +8360,9 @@ def test_get_serial_port_output_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SerialPortOutput.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8277,8 +8456,9 @@ def test_get_serial_port_output_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SerialPortOutput.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8421,8 +8601,9 @@ def test_get_serial_port_output_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SerialPortOutput.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8489,8 +8670,9 @@ def test_get_shielded_instance_identity_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8573,8 +8755,9 @@ def test_get_shielded_instance_identity_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8715,8 +8898,9 @@ def test_get_shielded_instance_identity_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8951,6 +9135,73 @@ def test_insert_rest(request_type): }, "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceRequest.meta.fields["instance_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_resource"][field])): + del request_init["instance_resource"][field][i][subfield] + else: + del request_init["instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
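The `is_repeated` branch of the block above handles list-valued sample fields: the stale subfield is removed from every element of the list rather than from the containing dict. A toy run of just that branch (names invented apart from `disks`):

request_init = {
    "instance_resource": {
        "disks": [
            {"device_name": "d0", "retired_flag": True},
            {"device_name": "d1", "retired_flag": False},
        ]
    }
}
field, subfield = "disks", "retired_flag"

# Repeated-field branch: strip the stale subfield from each list element.
for i in range(0, len(request_init["instance_resource"][field])):
    del request_init["instance_resource"][field][i][subfield]

assert request_init["instance_resource"]["disks"] == [
    {"device_name": "d0"},
    {"device_name": "d1"},
]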
@@ -8984,8 +9235,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9092,8 +9344,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertInstanceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9190,186 +9443,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_resource"] = { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "cpu_platform": "cpu_platform_value", - "creation_timestamp": "creation_timestamp_value", - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "display_device": {"enable_display": True}, - "fingerprint": "fingerprint_value", - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - "hostname": "hostname_value", - "id": 
205, - "instance_encryption_key": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_start_timestamp": "last_start_timestamp_value", - "last_stop_timestamp": "last_stop_timestamp_value", - "last_suspended_timestamp": "last_suspended_timestamp_value", - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "name": "name_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "params": {"resource_manager_tags": {}}, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, - "satisfies_pzs": True, - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "self_link": "self_link_value", - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "shielded_instance_integrity_policy": {"update_auto_learn_policy": True}, - "source_machine_image": "source_machine_image_value", - "source_machine_image_encryption_key": {}, - "start_restricted": True, - "status": "status_value", - "status_message": "status_message_value", - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9413,8 +9486,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9653,6 +9727,73 @@ def test_insert_unary_rest(request_type): }, "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceRequest.meta.fields["instance_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_resource"][field])): + del request_init["instance_resource"][field][i][subfield] + else: + del request_init["instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
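The field-pruning block added in the hunk above hinges on one check inside get_message_fields: whether a field's message type is proto-plus (nested fields live on message.meta.fields) or raw protobuf (nested fields live on message.DESCRIPTOR.fields). A sketch of just that detection step, reusing the request type and field name from the hunk; the print is illustrative only:

    from google.cloud.compute_v1.types import compute

    field = compute.InsertInstanceRequest.meta.fields["instance_resource"]

    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            # proto-plus message type: nested fields come from the proto-plus metadata
            nested_fields = list(field.message.meta.fields.values())
        else:
            # raw protobuf (*_pb2) message type: nested fields come from the descriptor
            nested_fields = list(field.message.DESCRIPTOR.fields)
        print(sorted(f.name for f in nested_fields))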
@@ -9686,8 +9827,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9772,8 +9914,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9870,186 +10013,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["instance_resource"] = { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "cpu_platform": "cpu_platform_value", - "creation_timestamp": "creation_timestamp_value", - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "display_device": {"enable_display": True}, - "fingerprint": "fingerprint_value", - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - "hostname": 
"hostname_value", - "id": 205, - "instance_encryption_key": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_start_timestamp": "last_start_timestamp_value", - "last_stop_timestamp": "last_stop_timestamp_value", - "last_suspended_timestamp": "last_suspended_timestamp_value", - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "name": "name_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "params": {"resource_manager_tags": {}}, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, - "satisfies_pzs": True, - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "self_link": "self_link_value", - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "shielded_instance_integrity_policy": {"update_auto_learn_policy": True}, - "source_machine_image": "source_machine_image_value", - "source_machine_image_encryption_key": {}, - "start_restricted": True, - "status": "status_value", - "status_message": "status_message_value", - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -10093,8 +10056,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10168,8 +10132,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10259,8 +10224,9 @@ def test_list_rest_required_fields(request_type=compute.ListInstancesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10396,8 +10362,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10521,8 +10488,9 @@ def test_list_referrers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceListReferrers.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10618,8 +10586,9 @@ def test_list_referrers_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceListReferrers.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10765,8 +10734,9 @@ def test_list_referrers_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceListReferrers.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # 
Convert return value to protobuf type + return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10883,6 +10853,88 @@ def test_remove_resource_policies_rest(request_type): request_init["instances_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesInstanceRequest.meta.fields[ + "instances_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_remove_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "instances_remove_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["instances_remove_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
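The deletion loop added in the block ending here indexes into every element of a repeated field but deletes directly from a singular one. A self-contained illustration of that logic on a plain dict; the field names echo the sample request above, and hypothetical_new_subfield is made up:

    # Illustrative only: how an unknown subfield would be stripped from a sample
    # request dict, for a repeated (list-of-dicts) field and a singular (dict) field.
    request_resource = {
        "disks": [{"boot": True, "hypothetical_new_subfield": 1}],
        "metadata": {"kind": "kind_value", "hypothetical_new_subfield": 2},
    }
    subfields_not_in_runtime = [
        {"field": "disks", "subfield": "hypothetical_new_subfield", "is_repeated": True},
        {"field": "metadata", "subfield": "hypothetical_new_subfield", "is_repeated": False},
    ]

    for entry in subfields_not_in_runtime:
        field, subfield, repeated = entry["field"], entry["subfield"], entry["is_repeated"]
        if repeated:
            for item in request_resource[field]:
                del item[subfield]
        else:
            del request_resource[field][subfield]

    assert request_resource == {
        "disks": [{"boot": True}],
        "metadata": {"kind": "kind_value"},
    }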
@@ -10916,8 +10968,9 @@ def test_remove_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11024,8 +11077,9 @@ def test_remove_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11119,9 +11173,6 @@ def test_remove_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11168,8 +11219,9 @@ def test_remove_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11230,6 +11282,88 @@ def test_remove_resource_policies_unary_rest(request_type): request_init["instances_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesInstanceRequest.meta.fields[ + "instances_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_remove_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "instances_remove_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["instances_remove_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -11263,8 +11397,9 @@ def test_remove_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11349,8 +11484,9 @@ def test_remove_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11444,9 +11580,6 @@ def test_remove_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11493,8 +11626,9 @@ def test_remove_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11585,8 +11719,9 @@ def test_reset_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11690,8 +11825,9 @@ def test_reset_rest_required_fields(request_type=compute.ResetInstanceRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11825,8 +11961,9 @@ def test_reset_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11914,8 +12051,9 @@ def test_reset_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11997,8 +12135,9 @@ def test_reset_unary_rest_required_fields(request_type=compute.ResetInstanceRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12132,8 +12271,9 @@ def test_reset_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12221,8 +12361,9 @@ def test_resume_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12326,8 +12467,9 @@ def test_resume_rest_required_fields(request_type=compute.ResumeInstanceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12461,8 +12603,9 @@ def test_resume_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12550,8 +12693,9 @@ def test_resume_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12633,8 +12777,9 @@ def test_resume_unary_rest_required_fields(request_type=compute.ResumeInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12768,8 +12913,9 @@ def test_resume_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12834,10 +12980,9 @@ def test_send_diagnostic_interrupt_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SendDiagnosticInterruptInstanceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12919,10 +13064,11 @@ def test_send_diagnostic_interrupt_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb( + # Convert return value to protobuf type + return_value = compute.SendDiagnosticInterruptInstanceResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13062,10 +13208,9 @@ def test_send_diagnostic_interrupt_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SendDiagnosticInterruptInstanceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13153,8 +13298,9 @@ def test_set_deletion_protection_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13265,8 +13411,9 @@ def test_set_deletion_protection_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13407,8 +13554,9 @@ def test_set_deletion_protection_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13496,8 +13644,9 @@ def test_set_deletion_protection_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13586,8 +13735,9 @@ def test_set_deletion_protection_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13728,8 +13878,9 @@ def test_set_deletion_protection_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13817,8 +13968,9 @@ def test_set_disk_auto_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -13944,8 +14096,9 
@@ def test_set_disk_auto_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14100,8 +14253,9 @@ def test_set_disk_auto_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14191,8 +14345,9 @@ def test_set_disk_auto_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14296,8 +14451,9 @@ def test_set_disk_auto_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14452,8 +14608,9 @@ def test_set_disk_auto_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14587,6 +14744,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyInstanceRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -14601,8 +14833,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14688,8 +14921,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14783,83 +15017,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
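Everything the mocked transport serves in these tests is ultimately parsed back into a message by the client under test, which is the reverse of the pb()/MessageToJson step. A hedged sketch of that round trip for compute.Policy, the response type in the set_iam_policy hunks above, with illustrative field values echoing the sample request:

    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format

    policy = compute.Policy(etag="etag_value", version=774)
    json_payload = json_format.MessageToJson(compute.Policy.pb(policy))

    # json_format.Parse fills the empty protobuf instance obtained from a fresh
    # proto-plus message and returns it.
    parsed_pb = json_format.Parse(json_payload, compute.Policy.pb(compute.Policy()))
    assert parsed_pb.etag == "etag_value"
    assert parsed_pb.version == 774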
@@ -14906,8 +15063,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14969,6 +15127,83 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInstanceRequest.meta.fields[ + "instances_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instances_set_labels_request_resource"][field]) + ): + del request_init["instances_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["instances_set_labels_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method 
and fake a response. @@ -15002,8 +15237,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15108,8 +15344,9 @@ def test_set_labels_rest_required_fields(request_type=compute.SetLabelsInstanceR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15203,10 +15440,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15253,8 +15486,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15316,6 +15550,83 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInstanceRequest.meta.fields[ + "instances_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instances_set_labels_request_resource"][field]) + ): + del request_init["instances_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["instances_set_labels_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -15349,8 +15660,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15435,8 +15747,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15530,10 +15843,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15580,8 +15889,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15644,6 +15954,88 @@ def test_set_machine_resources_rest(request_type): {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMachineResourcesInstanceRequest.meta.fields[ + "instances_set_machine_resources_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_machine_resources_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_set_machine_resources_request_resource" + ][field] + ), + ): + del request_init[ + "instances_set_machine_resources_request_resource" + ][field][i][subfield] + else: + del request_init["instances_set_machine_resources_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
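Throughout these hunks the only change to the response plumbing is the rename of `pb_return_value` to `return_value`: the test still converts the proto-plus message to its underlying protobuf type with `.pb()` and JSON-serializes that for the mocked wire payload. A hedged, self-contained sketch of that pattern follows; the operation values and function name are illustrative.

from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute

def fake_operation_response():
    # The value the mocked transport should appear to return.
    return_value = compute.Operation(name="operation-123")

    # Convert return value to protobuf type, then serialize as JSON --
    # the same two steps the generated tests perform.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)

    # Wrap it in a requests.Response the way the tests do, writing the private
    # `_content` attribute directly so `.content` / `.json()` see the payload.
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")
    return response_value

A test would then patch the transport session's `request` method so it returns this object, as the surrounding hunks do with `req.return_value = response_value`.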
@@ -15677,8 +16069,9 @@ def test_set_machine_resources_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15785,8 +16178,9 @@ def test_set_machine_resources_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15880,11 +16274,6 @@ def test_set_machine_resources_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_machine_resources_request_resource"] = { - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15931,8 +16320,9 @@ def test_set_machine_resources_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15995,6 +16385,88 @@ def test_set_machine_resources_unary_rest(request_type): {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMachineResourcesInstanceRequest.meta.fields[ + "instances_set_machine_resources_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_machine_resources_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_set_machine_resources_request_resource" + ][field] + ), + ): + del request_init[ + "instances_set_machine_resources_request_resource" + ][field][i][subfield] + else: + del request_init["instances_set_machine_resources_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -16028,8 +16500,9 @@ def test_set_machine_resources_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16114,8 +16587,9 @@ def test_set_machine_resources_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16209,11 +16683,6 @@ def test_set_machine_resources_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_machine_resources_request_resource"] = { - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16260,8 +16729,9 @@ def test_set_machine_resources_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16322,6 +16792,88 @@ def test_set_machine_type_rest(request_type): request_init["instances_set_machine_type_request_resource"] = { "machine_type": "machine_type_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMachineTypeInstanceRequest.meta.fields[ + "instances_set_machine_type_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_machine_type_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_machine_type_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_machine_type_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_machine_type_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -16355,8 +16907,9 @@ def test_set_machine_type_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16463,8 +17016,9 @@ def test_set_machine_type_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16558,9 +17112,6 @@ def test_set_machine_type_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_machine_type_request_resource"] = { - "machine_type": "machine_type_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16607,8 +17158,9 @@ def test_set_machine_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16669,6 +17221,88 @@ def test_set_machine_type_unary_rest(request_type): request_init["instances_set_machine_type_request_resource"] = { "machine_type": "machine_type_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMachineTypeInstanceRequest.meta.fields[ + "instances_set_machine_type_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_machine_type_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_machine_type_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_machine_type_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_machine_type_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -16702,8 +17336,9 @@ def test_set_machine_type_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16788,8 +17423,9 @@ def test_set_machine_type_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16883,9 +17519,6 @@ def test_set_machine_type_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_machine_type_request_resource"] = { - "machine_type": "machine_type_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16932,8 +17565,9 @@ def test_set_machine_type_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16996,6 +17630,73 @@ def test_set_metadata_rest(request_type): "items": [{"key": "key_value", "value": "value_value"}], "kind": "kind_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMetadataInstanceRequest.meta.fields["metadata_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_resource"][field])): + del request_init["metadata_resource"][field][i][subfield] + else: + del request_init["metadata_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
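One detail the pruning relies on, visible in the `request = request_type(**request_init)` line that closes each of these blocks: proto-plus constructors accept plain dicts (and lists of dicts) for message-typed fields, so the pruned mapping can be splatted straight into the request. A small illustrative example using the metadata sample from the hunk above:

from google.cloud.compute_v1.types import compute

request = compute.SetMetadataInstanceRequest(
    project="sample1",
    zone="sample2",
    instance="sample3",
    metadata_resource={
        "fingerprint": "fingerprint_value",
        "items": [{"key": "key_value", "value": "value_value"}],
        "kind": "kind_value",
    },
)
# The nested dicts are coerced into the corresponding message types.
assert request.metadata_resource.items[0].key == "key_value"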
@@ -17029,8 +17730,9 @@ def test_set_metadata_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17137,8 +17839,9 @@ def test_set_metadata_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17232,11 +17935,6 @@ def test_set_metadata_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["metadata_resource"] = { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17281,8 +17979,9 @@ def test_set_metadata_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17343,6 +18042,73 @@ def test_set_metadata_unary_rest(request_type): "items": [{"key": "key_value", "value": "value_value"}], "kind": "kind_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMetadataInstanceRequest.meta.fields["metadata_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_resource"][field])): + del request_init["metadata_resource"][field][i][subfield] + else: + del request_init["metadata_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -17376,8 +18142,9 @@ def test_set_metadata_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17462,8 +18229,9 @@ def test_set_metadata_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17557,11 +18325,6 @@ def test_set_metadata_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["metadata_resource"] = { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17606,8 +18369,9 @@ def test_set_metadata_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17666,6 +18430,88 @@ def test_set_min_cpu_platform_rest(request_type): request_init["instances_set_min_cpu_platform_request_resource"] = { "min_cpu_platform": "min_cpu_platform_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMinCpuPlatformInstanceRequest.meta.fields[ + "instances_set_min_cpu_platform_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_min_cpu_platform_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_min_cpu_platform_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_min_cpu_platform_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_min_cpu_platform_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -17699,8 +18545,9 @@ def test_set_min_cpu_platform_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17807,8 +18654,9 @@ def test_set_min_cpu_platform_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -17902,9 +18750,6 @@ def test_set_min_cpu_platform_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_min_cpu_platform_request_resource"] = { - "min_cpu_platform": "min_cpu_platform_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17951,8 +18796,9 @@ def test_set_min_cpu_platform_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18013,6 +18859,88 @@ def test_set_min_cpu_platform_unary_rest(request_type): request_init["instances_set_min_cpu_platform_request_resource"] = { "min_cpu_platform": "min_cpu_platform_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetMinCpuPlatformInstanceRequest.meta.fields[ + "instances_set_min_cpu_platform_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_min_cpu_platform_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_min_cpu_platform_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_min_cpu_platform_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_min_cpu_platform_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -18046,8 +18974,9 @@ def test_set_min_cpu_platform_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18132,8 +19061,9 @@ def test_set_min_cpu_platform_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18227,9 +19157,6 @@ def test_set_min_cpu_platform_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_min_cpu_platform_request_resource"] = { - "min_cpu_platform": "min_cpu_platform_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18276,8 +19203,9 @@ def test_set_min_cpu_platform_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18339,6 +19267,81 @@ def test_set_name_rest(request_type): "current_name": "current_name_value", "name": "name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNameInstanceRequest.meta.fields[ + "instances_set_name_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_name_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instances_set_name_request_resource"][field]) + ): + del request_init["instances_set_name_request_resource"][field][i][ + subfield + ] + else: + del request_init["instances_set_name_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -18372,8 +19375,9 @@ def test_set_name_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18478,8 +19482,9 @@ def test_set_name_rest_required_fields(request_type=compute.SetNameInstanceReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18571,10 +19576,6 @@ def test_set_name_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_name_request_resource"] = { - "current_name": "current_name_value", - "name": "name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18621,8 +19622,9 @@ def test_set_name_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18684,6 +19686,81 @@ def test_set_name_unary_rest(request_type): "current_name": "current_name_value", "name": "name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNameInstanceRequest.meta.fields[ + "instances_set_name_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_name_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instances_set_name_request_resource"][field]) + ): + del request_init["instances_set_name_request_resource"][field][i][ + subfield + ] + else: + del request_init["instances_set_name_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -18717,8 +19794,9 @@ def test_set_name_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18803,8 +19881,9 @@ def test_set_name_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -18896,10 +19975,6 @@ def test_set_name_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_name_request_resource"] = { - "current_name": "current_name_value", - "name": "name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18946,8 +20021,9 @@ def test_set_name_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19022,6 +20098,73 @@ def test_set_scheduling_rest(request_type): "preemptible": True, "provisioning_model": "provisioning_model_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSchedulingInstanceRequest.meta.fields["scheduling_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["scheduling_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["scheduling_resource"][field])): + del request_init["scheduling_resource"][field][i][subfield] + else: + del request_init["scheduling_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
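The proto-plus/protobuf branch inside `get_message_fields` exists because a nested field's type can come from either world at runtime. A quick hedged illustration of the two descriptor surfaces, using the scheduling request from the hunk above and a well-known `*_pb2` type for contrast:

from google.protobuf import timestamp_pb2

from google.cloud.compute_v1.types import compute

# Proto-plus side: field metadata lives on `meta.fields`, and the nested
# message class hangs off each field's `.message` attribute.
scheduling_cls = compute.SetSchedulingInstanceRequest.meta.fields[
    "scheduling_resource"
].message
print(list(scheduling_cls.meta.fields))  # includes "node_affinities", "preemptible", ...

# Plain protobuf side: the equivalent information comes from DESCRIPTOR.fields.
print([f.name for f in timestamp_pb2.Timestamp.DESCRIPTOR.fields])  # ['seconds', 'nanos']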
@@ -19055,8 +20198,9 @@ def test_set_scheduling_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19163,8 +20307,9 @@ def test_set_scheduling_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19258,23 +20403,6 @@ def test_set_scheduling_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["scheduling_resource"] = { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19319,8 +20447,9 @@ def test_set_scheduling_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19393,6 +20522,73 @@ def test_set_scheduling_unary_rest(request_type): "preemptible": True, "provisioning_model": "provisioning_model_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSchedulingInstanceRequest.meta.fields["scheduling_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["scheduling_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["scheduling_resource"][field])): + del request_init["scheduling_resource"][field][i][subfield] + else: + del request_init["scheduling_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -19426,8 +20622,9 @@ def test_set_scheduling_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19512,8 +20709,9 @@ def test_set_scheduling_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19607,23 +20805,6 @@ def test_set_scheduling_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["scheduling_resource"] = { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19668,8 +20849,9 @@ def test_set_scheduling_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19729,11 +20911,93 @@ def test_set_service_account_rest(request_type): "email": "email_value", "scopes": ["scopes_value1", "scopes_value2"], } - request = request_type(**request_init) + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ + "instances_set_service_account_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_service_account_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_service_account_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_service_account_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_service_account_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
return_value = compute.Operation( client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", @@ -19762,8 +21026,9 @@ def test_set_service_account_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19870,8 +21135,9 @@ def test_set_service_account_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19965,10 +21231,6 @@ def test_set_service_account_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_service_account_request_resource"] = { - "email": "email_value", - "scopes": ["scopes_value1", "scopes_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20015,8 +21277,9 @@ def test_set_service_account_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20078,6 +21341,88 @@ def test_set_service_account_unary_rest(request_type): "email": "email_value", "scopes": ["scopes_value1", "scopes_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ + "instances_set_service_account_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_set_service_account_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instances_set_service_account_request_resource"][ + field + ] + ), + ): + del request_init["instances_set_service_account_request_resource"][ + field + ][i][subfield] + else: + del request_init["instances_set_service_account_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
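The reason those stale subfields have to be removed is the very next line: request_type(**request_init) hands the nested dicts to proto-plus, which coerces them into the corresponding message types and rejects keys it does not recognize. A small sketch of the good path, assuming google-cloud-compute is installed:

from google.cloud.compute_v1.types import compute

request_init = {
    "project": "sample1",
    "zone": "sample2",
    "instance": "sample3",
    "instances_set_service_account_request_resource": {
        "email": "email_value",
        "scopes": ["scopes_value1", "scopes_value2"],
    },
}

# Nested dicts are coerced into the matching proto-plus messages.
request = compute.SetServiceAccountInstanceRequest(**request_init)
assert request.instances_set_service_account_request_resource.email == "email_value"

# A key the installed runtime does not define would typically make this
# construction raise a ValueError, which is what the pruning above prevents.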
@@ -20111,8 +21456,9 @@ def test_set_service_account_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20197,8 +21543,9 @@ def test_set_service_account_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20292,10 +21639,6 @@ def test_set_service_account_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_service_account_request_resource"] = { - "email": "email_value", - "scopes": ["scopes_value1", "scopes_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20342,8 +21685,9 @@ def test_set_service_account_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20404,6 +21748,88 @@ def test_set_shielded_instance_integrity_policy_rest(request_type): request_init["shielded_instance_integrity_policy_resource"] = { "update_auto_learn_policy": True } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ + "shielded_instance_integrity_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_integrity_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["shielded_instance_integrity_policy_resource"][ + field + ] + ), + ): + del request_init["shielded_instance_integrity_policy_resource"][ + field + ][i][subfield] + else: + del request_init["shielded_instance_integrity_policy_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
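Every mocked response in these hunks now follows the same renamed pattern: the proto-plus return_value is unwrapped to its raw protobuf message with .pb() before json_format.MessageToJson is applied, since json_format operates on protobuf messages rather than on the proto-plus wrappers. A condensed sketch of that serialization step, assuming google-cloud-compute is installed:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

return_value = compute.Operation(
    name="operation-123", status=compute.Operation.Status.DONE
)

# Unwrap the proto-plus wrapper to the underlying protobuf message ...
return_value_pb = compute.Operation.pb(return_value)
# ... which json_format can serialize; the tests use this as the fake HTTP body.
json_return_value = json_format.MessageToJson(return_value_pb)
body = json_return_value.encode("UTF-8")
assert b"operation-123" in body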
@@ -20437,8 +21863,9 @@ def test_set_shielded_instance_integrity_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20549,8 +21976,9 @@ def test_set_shielded_instance_integrity_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20649,9 +22077,6 @@ def test_set_shielded_instance_integrity_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_integrity_policy_resource"] = { - "update_auto_learn_policy": True - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20698,8 +22123,9 @@ def test_set_shielded_instance_integrity_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20762,6 +22188,88 @@ def test_set_shielded_instance_integrity_policy_unary_rest(request_type): request_init["shielded_instance_integrity_policy_resource"] = { "update_auto_learn_policy": True } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ + "shielded_instance_integrity_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_integrity_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["shielded_instance_integrity_policy_resource"][ + field + ] + ), + ): + del request_init["shielded_instance_integrity_policy_resource"][ + field + ][i][subfield] + else: + del request_init["shielded_instance_integrity_policy_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
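The serialized body is then planted on a bare requests.Response so the REST transport sees a realistic HTTP 200. A self-contained sketch of just that part of the mock setup, with no compute client involved:

import json
from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = json.dumps(
    {"name": "operation-123", "status": "DONE"}
).encode("UTF-8")

# This is what the transport ultimately reads back from the fake response.
assert response_value.json() == {"name": "operation-123", "status": "DONE"}
assert response_value.ok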
@@ -20795,8 +22303,9 @@ def test_set_shielded_instance_integrity_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20885,8 +22394,9 @@ def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -20987,9 +22497,6 @@ def test_set_shielded_instance_integrity_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_integrity_policy_resource"] = { - "update_auto_learn_policy": True - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21036,8 +22543,9 @@ def test_set_shielded_instance_integrity_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21101,6 +22609,73 @@ def test_set_tags_rest(request_type): "fingerprint": "fingerprint_value", "items": ["items_value1", "items_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTagsInstanceRequest.meta.fields["tags_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tags_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tags_resource"][field])): + del request_init["tags_resource"][field][i][subfield] + else: + del request_init["tags_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
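The get_message_fields helper repeated in these blocks answers a single question: which subfields does the installed library actually define for a given message-typed field? Distilled to the tags_resource field, the two introspection paths look roughly like this (a sketch, assuming google-cloud-compute is installed):

from google.cloud.compute_v1.types import compute

field = compute.SetTagsInstanceRequest.meta.fields["tags_resource"]

if not hasattr(field.message, "DESCRIPTOR"):
    # proto-plus message: field definitions live on the class-level meta object.
    known = {f.name for f in field.message.meta.fields.values()}
else:
    # vanilla protobuf message: field definitions come from the descriptor.
    known = {f.name for f in field.message.DESCRIPTOR.fields}

# For compute.Tags this should include at least the keys used in the sample request.
assert {"fingerprint", "items"} <= known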
@@ -21134,8 +22709,9 @@ def test_set_tags_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21240,8 +22816,9 @@ def test_set_tags_rest_required_fields(request_type=compute.SetTagsInstanceReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21333,10 +22910,6 @@ def test_set_tags_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["tags_resource"] = { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21381,8 +22954,9 @@ def test_set_tags_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21442,6 +23016,73 @@ def test_set_tags_unary_rest(request_type): "fingerprint": "fingerprint_value", "items": ["items_value1", "items_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTagsInstanceRequest.meta.fields["tags_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tags_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tags_resource"][field])): + del request_init["tags_resource"][field][i][subfield] + else: + del request_init["tags_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
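All of these tests intercept HTTP at the same seam: mock.patch.object(type(client.transport._session), "request") swaps out the session's request method, so no call ever leaves the process. The same pattern on a plain requests.Session, with a placeholder URL and no compute client:

from unittest import mock
import requests

fake = requests.Response()
fake.status_code = 200
fake._content = b'{"name": "operation-123"}'

with mock.patch.object(requests.Session, "request", return_value=fake) as req:
    # Any request issued through a Session now returns the canned response.
    resp = requests.Session().request("POST", "https://compute.googleapis.com/fake")
    assert resp.json()["name"] == "operation-123"
    req.assert_called_once()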
@@ -21475,8 +23116,9 @@ def test_set_tags_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21561,8 +23203,9 @@ def test_set_tags_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21654,10 +23297,6 @@ def test_set_tags_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["tags_resource"] = { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21702,8 +23341,9 @@ def test_set_tags_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21792,8 +23432,9 @@ def test_simulate_maintenance_event_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -21899,8 +23540,9 @@ def test_simulate_maintenance_event_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22037,8 +23679,9 @@ def test_simulate_maintenance_event_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22126,8 +23769,9 @@ def test_simulate_maintenance_event_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22211,8 +23855,9 @@ def test_simulate_maintenance_event_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22349,8 +23994,9 @@ def test_simulate_maintenance_event_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22438,8 +24084,9 @@ def test_start_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22543,8 +24190,9 @@ def test_start_rest_required_fields(request_type=compute.StartInstanceRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22678,8 +24326,9 @@ def test_start_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22767,8 +24416,9 @@ def test_start_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22850,8 +24500,9 @@ def test_start_unary_rest_required_fields(request_type=compute.StartInstanceRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -22985,8 +24636,9 @@ def test_start_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23055,6 +24707,88 @@ def test_start_with_encryption_key_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartWithEncryptionKeyInstanceRequest.meta.fields[ + "instances_start_with_encryption_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_start_with_encryption_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_start_with_encryption_key_request_resource" + ][field] + ), + ): + del request_init[ + "instances_start_with_encryption_key_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instances_start_with_encryption_key_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
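start_with_encryption_key carries the deepest sample in this file: disks is a repeated message whose elements embed a customer encryption key, which is exactly why the pruning walks one level of nested subfields. Constructing the request from the same nested dicts shows the conversion the test ultimately relies on (a sketch, assuming google-cloud-compute is installed):

from google.cloud.compute_v1.types import compute

request = compute.StartWithEncryptionKeyInstanceRequest(
    project="sample1",
    zone="sample2",
    instance="sample3",
    instances_start_with_encryption_key_request_resource={
        "disks": [
            {
                "disk_encryption_key": {"raw_key": "raw_key_value"},
                "source": "source_value",
            }
        ]
    },
)

resource = request.instances_start_with_encryption_key_request_resource
assert resource.disks[0].source == "source_value"
assert resource.disks[0].disk_encryption_key.raw_key == "raw_key_value"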
@@ -23088,8 +24822,9 @@ def test_start_with_encryption_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23196,8 +24931,9 @@ def test_start_with_encryption_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23291,20 +25027,6 @@ def test_start_with_encryption_key_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_start_with_encryption_key_request_resource"] = { - "disks": [ - { - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "source": "source_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23357,8 +25079,9 @@ def test_start_with_encryption_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23436,6 +25159,88 @@ def test_start_with_encryption_key_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartWithEncryptionKeyInstanceRequest.meta.fields[ + "instances_start_with_encryption_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instances_start_with_encryption_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "instances_start_with_encryption_key_request_resource" + ][field] + ), + ): + del request_init[ + "instances_start_with_encryption_key_request_resource" + ][field][i][subfield] + else: + del request_init[ + "instances_start_with_encryption_key_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
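The only part of the cleanup that varies between resources is the final deletion loop: for repeated fields it has to strip the stale subfield from every element rather than from a single dict. On a plain dict, with illustrative field and subfield names, the distinction looks like this:

request_init = {
    "resource": {
        "disks": [
            {"source": "a", "stale_subfield": 1},
            {"source": "b", "stale_subfield": 2},
        ]
    }
}
field, subfield, field_repeated = "disks", "stale_subfield", True

if field_repeated:
    # Repeated message field: remove the subfield from each element.
    for i in range(0, len(request_init["resource"][field])):
        del request_init["resource"][field][i][subfield]
else:
    # Singular message field: remove the subfield once.
    del request_init["resource"][field][subfield]

assert request_init["resource"]["disks"] == [{"source": "a"}, {"source": "b"}]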
@@ -23469,8 +25274,9 @@ def test_start_with_encryption_key_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23555,8 +25361,9 @@ def test_start_with_encryption_key_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23650,20 +25457,6 @@ def test_start_with_encryption_key_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_start_with_encryption_key_request_resource"] = { - "disks": [ - { - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "source": "source_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23716,8 +25509,9 @@ def test_start_with_encryption_key_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23814,8 +25608,9 @@ def test_stop_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -23924,8 +25719,9 @@ def test_stop_rest_required_fields(request_type=compute.StopInstanceRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24064,8 +25860,9 @@ def test_stop_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24153,8 +25950,9 @@ def test_stop_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24241,8 +26039,9 @@ def test_stop_unary_rest_required_fields(request_type=compute.StopInstanceReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24381,8 +26180,9 @@ def test_stop_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24470,8 +26270,9 @@ def test_suspend_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24580,8 +26381,9 @@ def test_suspend_rest_required_fields(request_type=compute.SuspendInstanceReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24720,8 +26522,9 @@ def test_suspend_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -24809,8 +26612,9 @@ def test_suspend_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -24899,8 +26703,9 @@ def test_suspend_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25039,8 +26844,9 @@ def test_suspend_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25098,6 +26904,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsInstanceRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
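The block ending here is the first instance of a pattern that repeats for every request body in the hunks below: before building the request object, the test drops any nested keys of the sample dict that the installed proto-plus/protobuf runtime does not define (see the gapic-generator-python issue 1748 reference in the added comment). For readers skimming the repeated hunks, the same idea condensed into a standalone helper is sketched here; the name prune_unknown_subfields is illustrative only and is not part of the generated code.

def prune_unknown_subfields(request_init, field_name, request_message_type):
    # Illustrative condensation of the inline pruning logic added above;
    # a sketch, not the generated code itself.

    def get_message_fields(field):
        # Message-typed (composite) fields expose their nested fields via
        # proto-plus metadata or, for vanilla protobuf types, the descriptor.
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):  # proto-plus type
                return list(field.message.meta.fields.values())
            return list(field.message.DESCRIPTOR.fields)  # protobuf type
        return []

    top_field = request_message_type.meta.fields[field_name]
    runtime_nested_fields = {
        (field.name, nested.name)
        for field in get_message_fields(top_field)
        for nested in get_message_fields(field)
    }

    for field, value in request_init[field_name].items():
        # Repeated message fields are lists of dicts, singular message fields
        # are plain dicts; scalar values are left untouched.
        samples = value if isinstance(value, list) else [value]
        for sample in samples:
            if isinstance(sample, dict):
                for subfield in list(sample):
                    if (field, subfield) not in runtime_nested_fields:
                        del sample[subfield]

With such a helper, each inline block below would reduce to a single call, e.g. prune_unknown_subfields(request_init, "test_permissions_request_resource", compute.TestIamPermissionsInstanceRequest); the generator currently keeps the logic inline, so the hunks that follow repeat it verbatim for each request field.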
@@ -25110,8 +26991,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25195,8 +27077,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25292,9 +27175,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25341,8 +27221,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25580,6 +27461,73 @@ def test_update_rest(request_type): }, "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateInstanceRequest.meta.fields["instance_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_resource"][field])): + del request_init["instance_resource"][field][i][subfield] + else: + del request_init["instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
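The other change repeated throughout these hunks is purely a rename: the intermediate pb_return_value variable is dropped and return_value is rebound to the converted protobuf message, with a clarifying comment added. Pulled out of the surrounding test, the mocked-response pattern looks like the sketch below (imports follow what this test module already uses; the Operation field value is arbitrary):

from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute

# Build a fake REST response: convert the proto-plus message to its underlying
# protobuf type, serialize it to JSON, and install the bytes as the body.
return_value = compute.Operation(name="name_value")
response_value = Response()
response_value.status_code = 200
# Convert return value to protobuf type
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")

The patched session then returns response_value, and the client under test parses the JSON body back into a compute.Operation.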
@@ -25613,8 +27561,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25725,8 +27674,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateInstanceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -25824,186 +27774,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instance_resource"] = { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "cpu_platform": "cpu_platform_value", - "creation_timestamp": "creation_timestamp_value", - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "display_device": {"enable_display": True}, - "fingerprint": "fingerprint_value", - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - 
"hostname": "hostname_value", - "id": 205, - "instance_encryption_key": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_start_timestamp": "last_start_timestamp_value", - "last_stop_timestamp": "last_stop_timestamp_value", - "last_suspended_timestamp": "last_suspended_timestamp_value", - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "name": "name_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "params": {"resource_manager_tags": {}}, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, - "satisfies_pzs": True, - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "self_link": "self_link_value", - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "shielded_instance_integrity_policy": {"update_auto_learn_policy": True}, - "source_machine_image": "source_machine_image_value", - "source_machine_image_encryption_key": {}, - "start_restricted": True, - "status": "status_value", - "status_message": "status_message_value", - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26052,8 +27822,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -26293,6 +28064,73 @@ def test_update_unary_rest(request_type): }, "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateInstanceRequest.meta.fields["instance_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance_resource"][field])): + del request_init["instance_resource"][field][i][subfield] + else: + del request_init["instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -26326,8 +28164,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -26416,8 +28255,9 @@ def test_update_unary_rest_required_fields(request_type=compute.UpdateInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -26471,230 +28311,50 @@ def test_update_unary_rest_interceptors(null_interceptor): transports.InstancesRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = compute.UpdateInstanceRequest.pb(compute.UpdateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.UpdateInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.update_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_unary_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateInstanceRequest -): - client = InstancesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instance_resource"] = { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "cpu_platform": "cpu_platform_value", - "creation_timestamp": "creation_timestamp_value", - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": 
{}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "display_device": {"enable_display": True}, - "fingerprint": "fingerprint_value", - "guest_accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - "hostname": "hostname_value", - "id": 205, - "instance_encryption_key": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_start_timestamp": "last_start_timestamp_value", - "last_stop_timestamp": "last_stop_timestamp_value", - "last_suspended_timestamp": "last_suspended_timestamp_value", - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "name": "name_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "params": {"resource_manager_tags": {}}, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, - "satisfies_pzs": True, - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": 
"location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } + post.assert_not_called() + pb_message = compute.UpdateInstanceRequest.pb(compute.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "self_link": "self_link_value", - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "shielded_instance_integrity_policy": {"update_auto_learn_policy": True}, - "source_machine_image": "source_machine_image_value", - "source_machine_image_encryption_key": {}, - "start_restricted": True, - "status": "status_value", - "status_message": "status_message_value", - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - "zone": "zone_value", - } + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26743,8 +28403,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -26815,6 +28476,77 @@ def test_update_access_config_rest(request_type): "set_public_ptr": True, "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateAccessConfigInstanceRequest.meta.fields[ + "access_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "access_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_config_resource"][field])): + del request_init["access_config_resource"][field][i][subfield] + else: + del request_init["access_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
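Stepping back to the test_update_unary_rest_interceptors hunk a little earlier (the one re-indented alongside the bad-request cleanup): it patches the pre_update hook on transports.InstancesRestInterceptor, plus the matching post hook, and asserts each fires exactly once around a mocked call. For orientation, a hedged sketch of how such an interceptor is attached outside the tests; the subclass body is purely illustrative, the post hook name follows the generator's pre_/post_ naming, and the transport keyword is assumed from the interceptor tests' setup:

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.instances import InstancesClient, transports


class LoggingInterceptor(transports.InstancesRestInterceptor):
    # The pre hook may rewrite the (request, metadata) pair before transcoding;
    # the post hook sees the parsed response (a compute.Operation for update).
    def pre_update(self, request, metadata):
        print("updating instance:", request.instance)
        return request, metadata

    def post_update(self, response):
        print("operation status:", response.status)
        return response


transport = transports.InstancesRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = InstancesClient(transport=transport)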
@@ -26848,8 +28580,9 @@ def test_update_access_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -26968,8 +28701,9 @@ def test_update_access_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27074,17 +28808,6 @@ def test_update_access_config_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["access_config_resource"] = { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27132,8 +28855,9 @@ def test_update_access_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27203,6 +28927,77 @@ def test_update_access_config_unary_rest(request_type): "set_public_ptr": True, "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateAccessConfigInstanceRequest.meta.fields[ + "access_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "access_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_config_resource"][field])): + del request_init["access_config_resource"][field][i][subfield] + else: + del request_init["access_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -27236,8 +29031,9 @@ def test_update_access_config_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27334,8 +29130,9 @@ def test_update_access_config_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27440,17 +29237,6 @@ def test_update_access_config_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["access_config_resource"] = { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27498,8 +29284,9 @@ def test_update_access_config_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27559,6 +29346,77 @@ def test_update_display_device_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["display_device_resource"] = {"enable_display": True} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateDisplayDeviceInstanceRequest.meta.fields[ + "display_device_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "display_device_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["display_device_resource"][field])): + del request_init["display_device_resource"][field][i][subfield] + else: + del request_init["display_device_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -27592,8 +29450,9 @@ def test_update_display_device_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27700,8 +29559,9 @@ def test_update_display_device_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27795,7 +29655,6 @@ def test_update_display_device_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["display_device_resource"] = {"enable_display": True} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27840,8 +29699,9 @@ def test_update_display_device_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -27898,6 +29758,77 @@ def test_update_display_device_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["display_device_resource"] = {"enable_display": True} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateDisplayDeviceInstanceRequest.meta.fields[ + "display_device_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "display_device_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["display_device_resource"][field])): + del request_init["display_device_resource"][field][i][subfield] + else: + del request_init["display_device_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -27931,8 +29862,9 @@ def test_update_display_device_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28017,8 +29949,9 @@ def test_update_display_device_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28112,7 +30045,6 @@ def test_update_display_device_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["display_device_resource"] = {"enable_display": True} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28157,8 +30089,9 @@ def test_update_display_device_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28249,6 +30182,79 @@ def test_update_network_interface_rest(request_type): "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateNetworkInterfaceInstanceRequest.meta.fields[ + "network_interface_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_interface_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_interface_resource"][field]) + ): + del request_init["network_interface_resource"][field][i][subfield] + else: + del request_init["network_interface_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
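For the network_interface_resource hunk just above, the repeated-field branch of the pruning loop is the one that matters: access_configs is a list of dicts, so an unknown subfield detected on the first element is deleted from every element. A hypothetical illustration using the illustrative prune_unknown_subfields sketch from earlier (the beta_flag key is invented and does not exist in the API):

from google.cloud.compute_v1.types import compute

request_init = {
    "network_interface_resource": {
        "access_configs": [
            {"name": "name_value", "beta_flag": True},        # beta_flag is invented
            {"name": "other_name_value", "beta_flag": False},
        ]
    }
}
# Using the illustrative helper sketched earlier (not generated code):
prune_unknown_subfields(
    request_init,
    "network_interface_resource",
    compute.UpdateNetworkInterfaceInstanceRequest,
)
# "beta_flag" is removed from each element; "name" is a known AccessConfig
# subfield and survives, so the request constructs cleanly.
request = compute.UpdateNetworkInterfaceInstanceRequest(**request_init)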
@@ -28282,8 +30288,9 @@ def test_update_network_interface_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28402,8 +30409,9 @@ def test_update_network_interface_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28508,41 +30516,6 @@ def test_update_network_interface_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["network_interface_resource"] = { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28592,8 +30565,9 @@ def test_update_network_interface_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28689,6 +30663,79 @@ def test_update_network_interface_unary_rest(request_type): "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateNetworkInterfaceInstanceRequest.meta.fields[ + "network_interface_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_interface_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_interface_resource"][field]) + ): + del request_init["network_interface_resource"][field][i][subfield] + else: + del request_init["network_interface_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -28722,8 +30769,9 @@ def test_update_network_interface_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28820,8 +30868,9 @@ def test_update_network_interface_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -28926,41 +30975,6 @@ def test_update_network_interface_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["network_interface_resource"] = { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -29010,8 +31024,9 @@ def test_update_network_interface_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29077,6 +31092,81 @@ def test_update_shielded_instance_config_rest(request_type): "enable_secure_boot": True, "enable_vtpm": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateShieldedInstanceConfigInstanceRequest.meta.fields[ + "shielded_instance_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["shielded_instance_config_resource"][field]) + ): + del request_init["shielded_instance_config_resource"][field][i][ + subfield + ] + else: + del request_init["shielded_instance_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
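The repeated get_message_fields helper decides whether a composite field comes from a proto-plus wrapper (whose schema is reachable through meta.fields) or from a plain *_pb2 class (which exposes DESCRIPTOR.fields) simply by probing for a DESCRIPTOR attribute. A small sketch of the same probe against the request field used right above, assuming the import alias these generated tests use:

    from google.cloud.compute_v1.types import compute  # assumption: alias used by the generated tests

    field = compute.UpdateShieldedInstanceConfigInstanceRequest.meta.fields[
        "shielded_instance_config_resource"
    ]
    if hasattr(field.message, "DESCRIPTOR"):        # plain protobuf (*_pb2) message class
        names = [f.name for f in field.message.DESCRIPTOR.fields]
    else:                                           # proto-plus wrapper
        names = list(field.message.meta.fields)
    print(names)  # expected to include enable_integrity_monitoring, enable_secure_boot, enable_vtpm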
@@ -29110,8 +31200,9 @@ def test_update_shielded_instance_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29218,8 +31309,9 @@ def test_update_shielded_instance_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29316,11 +31408,6 @@ def test_update_shielded_instance_config_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_config_resource"] = { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -29367,8 +31454,9 @@ def test_update_shielded_instance_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29431,6 +31519,81 @@ def test_update_shielded_instance_config_unary_rest(request_type): "enable_secure_boot": True, "enable_vtpm": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateShieldedInstanceConfigInstanceRequest.meta.fields[ + "shielded_instance_config_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_config_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["shielded_instance_config_resource"][field]) + ): + del request_init["shielded_instance_config_resource"][field][i][ + subfield + ] + else: + del request_init["shielded_instance_config_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
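Apart from the sample pruning, the only change these hunks make to the response mocking is a rename: the proto-plus return value is still unwrapped with the type's pb() classmethod before json_format.MessageToJson, which accepts only protobuf messages; the intermediate pb_return_value name is dropped by rebinding return_value instead. A short sketch of that conversion, with an illustrative field value:

    from google.cloud.compute_v1.types import compute  # assumption: alias used by the generated tests
    from google.protobuf import json_format

    return_value = compute.Operation(name="operation-1")         # proto-plus wrapper; field value illustrative
    return_value = compute.Operation.pb(return_value)            # unwrap to the underlying protobuf message
    json_return_value = json_format.MessageToJson(return_value)  # MessageToJson requires a protobuf message
    body = json_return_value.encode("UTF-8")                     # becomes the mocked HTTP response body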
@@ -29464,8 +31627,9 @@ def test_update_shielded_instance_config_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29550,8 +31714,9 @@ def test_update_shielded_instance_config_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -29648,11 +31813,6 @@ def test_update_shielded_instance_config_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_config_resource"] = { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -29699,8 +31859,9 @@ def test_update_shielded_instance_config_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index 4b162b59..d203f066 100644 --- a/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -619,8 +619,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -710,10 +711,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -853,8 +853,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1013,8 +1014,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1122,8 +1124,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1265,8 +1268,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1358,8 +1362,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1445,8 +1450,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1588,8 +1594,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert 
return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1695,8 +1702,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1822,8 +1830,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1967,8 +1976,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2078,6 +2088,81 @@ def test_insert_rest(request_type): "type_": "type__value", "vlan_tag8021q": 1160, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInterconnectAttachmentRequest.meta.fields[ + "interconnect_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["interconnect_attachment_resource"][field]) + ): + del request_init["interconnect_attachment_resource"][field][i][ + subfield + ] + else: + del request_init["interconnect_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
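All of these REST tests fake the transport the same way: a real requests.Response is created, given a 200 status, and the JSON produced above is written into its private _content attribute so that client-side parsing sees a normal HTTP body. A condensed sketch of just that mocking pattern, with a hypothetical payload:

    from unittest import mock
    from requests import Response

    json_return_value = '{"name": "operation-1"}'  # hypothetical; in the tests it comes from MessageToJson

    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")  # .text/.json() read from _content

    req = mock.MagicMock()             # stands in for the patched session call, named `req` in these tests
    req.return_value = response_value
    assert req().json() == {"name": "operation-1"}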
@@ -2111,8 +2196,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2220,8 +2306,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2321,61 +2408,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["interconnect_attachment_resource"] = { - "admin_enabled": True, - "bandwidth": "bandwidth_value", - "candidate_ipv6_subnets": [ - "candidate_ipv6_subnets_value1", - "candidate_ipv6_subnets_value2", - ], - "candidate_subnets": ["candidate_subnets_value1", "candidate_subnets_value2"], - "cloud_router_ip_address": "cloud_router_ip_address_value", - "cloud_router_ipv6_address": "cloud_router_ipv6_address_value", - "cloud_router_ipv6_interface_id": "cloud_router_ipv6_interface_id_value", - "configuration_constraints": { - "bgp_md5": "bgp_md5_value", - "bgp_peer_asn_ranges": [{"max_": 421, "min_": 419}], - }, - "creation_timestamp": "creation_timestamp_value", - "customer_router_ip_address": "customer_router_ip_address_value", - "customer_router_ipv6_address": "customer_router_ipv6_address_value", - "customer_router_ipv6_interface_id": "customer_router_ipv6_interface_id_value", - "dataplane_version": 1807, - "description": "description_value", - "edge_availability_domain": "edge_availability_domain_value", - "encryption": "encryption_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect": "interconnect_value", - "ipsec_internal_addresses": [ - "ipsec_internal_addresses_value1", - "ipsec_internal_addresses_value2", - ], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "mtu": 342, - "name": "name_value", - "operational_status": "operational_status_value", - "pairing_key": "pairing_key_value", - "partner_asn": 1181, - "partner_metadata": { - "interconnect_name": "interconnect_name_value", - "partner_name": "partner_name_value", - "portal_url": "portal_url_value", - }, - "private_interconnect_info": {"tag8021q": 632}, - "region": "region_value", - "remote_service": "remote_service_value", - "router": "router_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - "subnet_length": 1394, - "type_": "type__value", - "vlan_tag8021q": 1160, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
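In the *_rest_bad_request tests the generator now drops the whole *_resource body and keeps only the routing fields, presumably because the mocked call fails with an error status before the body is ever serialized, and keeping the hard-coded body would otherwise need the same runtime-pruning block as the happy-path tests. A minimal sketch of what is left, assuming the same import alias:

    from google.cloud.compute_v1.types import compute  # assumption: alias used by the generated tests

    # Only the fields needed for HTTP transcoding of the URL remain in the sample.
    request_init = {"project": "sample1", "region": "sample2"}
    request = compute.InsertInterconnectAttachmentRequest(**request_init)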
@@ -2417,8 +2449,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2530,6 +2563,81 @@ def test_insert_unary_rest(request_type): "type_": "type__value", "vlan_tag8021q": 1160, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInterconnectAttachmentRequest.meta.fields[ + "interconnect_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["interconnect_attachment_resource"][field]) + ): + del request_init["interconnect_attachment_resource"][field][i][ + subfield + ] + else: + del request_init["interconnect_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
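For repeated message fields the pruning block inspects only the first element of the sample list, but then deletes the stale sub-key from every element through the indexed for i in range(...) loop. The same behaviour on a plain list of dicts, with hypothetical names:

    resource = {
        "candidate_items": [                  # hypothetical repeated message field
            {"name": "a", "stale": 1},
            {"name": "b", "stale": 2},
        ]
    }
    runtime_nested_fields = {("candidate_items", "name")}  # hypothetical runtime view

    for field, value in resource.items():
        if isinstance(value, list) and value:
            for subfield in list(value[0].keys()):         # only the first element is inspected
                if (field, subfield) not in runtime_nested_fields:
                    for i in range(0, len(resource[field])):
                        del resource[field][i][subfield]   # ...but every element is pruned

    assert resource == {"candidate_items": [{"name": "a"}, {"name": "b"}]}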
@@ -2563,8 +2671,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2650,8 +2759,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2751,61 +2861,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["interconnect_attachment_resource"] = { - "admin_enabled": True, - "bandwidth": "bandwidth_value", - "candidate_ipv6_subnets": [ - "candidate_ipv6_subnets_value1", - "candidate_ipv6_subnets_value2", - ], - "candidate_subnets": ["candidate_subnets_value1", "candidate_subnets_value2"], - "cloud_router_ip_address": "cloud_router_ip_address_value", - "cloud_router_ipv6_address": "cloud_router_ipv6_address_value", - "cloud_router_ipv6_interface_id": "cloud_router_ipv6_interface_id_value", - "configuration_constraints": { - "bgp_md5": "bgp_md5_value", - "bgp_peer_asn_ranges": [{"max_": 421, "min_": 419}], - }, - "creation_timestamp": "creation_timestamp_value", - "customer_router_ip_address": "customer_router_ip_address_value", - "customer_router_ipv6_address": "customer_router_ipv6_address_value", - "customer_router_ipv6_interface_id": "customer_router_ipv6_interface_id_value", - "dataplane_version": 1807, - "description": "description_value", - "edge_availability_domain": "edge_availability_domain_value", - "encryption": "encryption_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect": "interconnect_value", - "ipsec_internal_addresses": [ - "ipsec_internal_addresses_value1", - "ipsec_internal_addresses_value2", - ], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "mtu": 342, - "name": "name_value", - "operational_status": "operational_status_value", - "pairing_key": "pairing_key_value", - "partner_asn": 1181, - "partner_metadata": { - "interconnect_name": "interconnect_name_value", - "partner_name": "partner_name_value", - "portal_url": "portal_url_value", - }, - "private_interconnect_info": {"tag8021q": 632}, - "region": "region_value", - "remote_service": "remote_service_value", - "router": "router_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - "subnet_length": 1394, - "type_": "type__value", - "vlan_tag8021q": 1160, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
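The reason stale keys must be pruned at all is the request_type(**request_init) call that follows each block: proto-plus refuses field names it does not recognise when a message is built from keyword arguments or nested dicts, so a sample generated against a newer API surface can make the test fail at construction time instead of exercising the transport. A tiny sketch of that failure mode (the bogus field name is, of course, made up):

    from google.cloud.compute_v1.types import compute  # assumption: alias used by the generated tests

    ok = compute.InsertInterconnectAttachmentRequest(project="sample1", region="sample2")

    try:
        compute.InsertInterconnectAttachmentRequest(project="sample1", not_a_real_field="x")
    except Exception as exc:  # proto-plus raises on unknown field names; exact exception type left open here
        print(f"rejected as expected: {exc!r}")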
@@ -2847,8 +2902,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2920,8 +2976,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3013,8 +3070,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3156,8 +3214,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3327,6 +3386,81 @@ def test_patch_rest(request_type): "type_": "type__value", "vlan_tag8021q": 1160, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInterconnectAttachmentRequest.meta.fields[ + "interconnect_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["interconnect_attachment_resource"][field]) + ): + del request_init["interconnect_attachment_resource"][field][i][ + subfield + ] + else: + del request_init["interconnect_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
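For the interconnect_attachment_resource hunks, the nested comprehension pairs each composite field of the resource with its immediate sub-fields; two levels suffice because the sample dicts nest at most one message deep. A partial, hand-written illustration of the pairs it would produce, based on the sample body above (not an exhaustive dump):

    runtime_nested_fields = [
        ("configuration_constraints", "bgp_md5"),
        ("configuration_constraints", "bgp_peer_asn_ranges"),
        ("partner_metadata", "interconnect_name"),
        ("partner_metadata", "partner_name"),
        ("partner_metadata", "portal_url"),
        ("private_interconnect_info", "tag8021q"),
        # scalar fields such as "bandwidth" or "mtu" contribute no pairs
    ]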
@@ -3360,8 +3494,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3470,8 +3605,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3571,61 +3707,6 @@ def test_patch_rest_bad_request( "region": "sample2", "interconnect_attachment": "sample3", } - request_init["interconnect_attachment_resource"] = { - "admin_enabled": True, - "bandwidth": "bandwidth_value", - "candidate_ipv6_subnets": [ - "candidate_ipv6_subnets_value1", - "candidate_ipv6_subnets_value2", - ], - "candidate_subnets": ["candidate_subnets_value1", "candidate_subnets_value2"], - "cloud_router_ip_address": "cloud_router_ip_address_value", - "cloud_router_ipv6_address": "cloud_router_ipv6_address_value", - "cloud_router_ipv6_interface_id": "cloud_router_ipv6_interface_id_value", - "configuration_constraints": { - "bgp_md5": "bgp_md5_value", - "bgp_peer_asn_ranges": [{"max_": 421, "min_": 419}], - }, - "creation_timestamp": "creation_timestamp_value", - "customer_router_ip_address": "customer_router_ip_address_value", - "customer_router_ipv6_address": "customer_router_ipv6_address_value", - "customer_router_ipv6_interface_id": "customer_router_ipv6_interface_id_value", - "dataplane_version": 1807, - "description": "description_value", - "edge_availability_domain": "edge_availability_domain_value", - "encryption": "encryption_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect": "interconnect_value", - "ipsec_internal_addresses": [ - "ipsec_internal_addresses_value1", - "ipsec_internal_addresses_value2", - ], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "mtu": 342, - "name": "name_value", - "operational_status": "operational_status_value", - "pairing_key": "pairing_key_value", - "partner_asn": 1181, - "partner_metadata": { - "interconnect_name": "interconnect_name_value", - "partner_name": "partner_name_value", - "portal_url": "portal_url_value", - }, - "private_interconnect_info": {"tag8021q": 632}, - "region": "region_value", - "remote_service": "remote_service_value", - "router": "router_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - "subnet_length": 1394, - "type_": "type__value", - "vlan_tag8021q": 1160, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3672,8 +3753,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3790,6 +3872,81 @@ def test_patch_unary_rest(request_type): "type_": "type__value", "vlan_tag8021q": 1160, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInterconnectAttachmentRequest.meta.fields[ + "interconnect_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["interconnect_attachment_resource"][field]) + ): + del request_init["interconnect_attachment_resource"][field][i][ + subfield + ] + else: + del request_init["interconnect_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3823,8 +3980,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3911,8 +4069,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4012,61 +4171,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "interconnect_attachment": "sample3", } - request_init["interconnect_attachment_resource"] = { - "admin_enabled": True, - "bandwidth": "bandwidth_value", - "candidate_ipv6_subnets": [ - "candidate_ipv6_subnets_value1", - "candidate_ipv6_subnets_value2", - ], - "candidate_subnets": ["candidate_subnets_value1", "candidate_subnets_value2"], - "cloud_router_ip_address": "cloud_router_ip_address_value", - "cloud_router_ipv6_address": "cloud_router_ipv6_address_value", - "cloud_router_ipv6_interface_id": "cloud_router_ipv6_interface_id_value", - "configuration_constraints": { - "bgp_md5": "bgp_md5_value", - "bgp_peer_asn_ranges": [{"max_": 421, "min_": 419}], - }, - "creation_timestamp": "creation_timestamp_value", - "customer_router_ip_address": "customer_router_ip_address_value", - "customer_router_ipv6_address": "customer_router_ipv6_address_value", - "customer_router_ipv6_interface_id": "customer_router_ipv6_interface_id_value", - "dataplane_version": 1807, - "description": "description_value", - "edge_availability_domain": "edge_availability_domain_value", - "encryption": "encryption_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect": "interconnect_value", - "ipsec_internal_addresses": [ - "ipsec_internal_addresses_value1", - "ipsec_internal_addresses_value2", - ], - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "mtu": 342, - "name": "name_value", - "operational_status": "operational_status_value", - "pairing_key": "pairing_key_value", - "partner_asn": 1181, - "partner_metadata": { - "interconnect_name": "interconnect_name_value", - "partner_name": "partner_name_value", - "portal_url": "portal_url_value", - }, - "private_interconnect_info": {"tag8021q": 632}, - "region": "region_value", - "remote_service": "remote_service_value", - "router": "router_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - "subnet_length": 1394, - "type_": "type__value", - "vlan_tag8021q": 1160, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4113,8 +4217,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4176,6 +4281,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInterconnectAttachmentRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. @@ -4209,8 +4389,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4317,8 +4498,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4414,10 +4596,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4464,8 +4642,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4527,6 +4706,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInterconnectAttachmentRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
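The set_labels variants show the degenerate case: the sample body is only a fingerprint string plus an empty labels map, so the guard on result and hasattr(result, "keys") skips the empty dict and the pruning loop ends up with nothing to delete. Sketch, using the same plain-dict model as above:

    resource = {"label_fingerprint": "label_fingerprint_value", "labels": {}}  # as in the sample request above
    runtime_nested_fields = set()  # irrelevant here; nothing is ever collected

    subfields_not_in_runtime = []
    for field, value in resource.items():
        result = value if isinstance(value, dict) else None
        if result and hasattr(result, "keys"):            # {} is falsy, so the empty labels map is skipped
            for subfield in result:
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append((field, subfield))

    assert subfields_not_in_runtime == []
    assert resource == {"label_fingerprint": "label_fingerprint_value", "labels": {}}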
@@ -4560,8 +4814,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4646,8 +4901,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4743,10 +4999,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4793,8 +5045,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/tests/unit/gapic/compute_v1/test_interconnect_locations.py index db9b7924..32936ac9 100644 --- a/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -620,8 +620,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -714,8 +715,9 @@ def test_get_rest_required_fields(request_type=compute.GetInterconnectLocationRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -849,8 +851,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.InterconnectLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -919,8 +922,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1008,8 +1012,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1145,8 +1150,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py b/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py index 33adfb05..350c7752 100644 --- a/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py +++ b/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -630,8 +630,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -733,8 +734,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -871,8 +873,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper 
Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -941,8 +944,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1030,8 +1034,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1167,8 +1172,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_interconnects.py b/tests/unit/gapic/compute_v1/test_interconnects.py index d0a9e41b..67e4342f 100644 --- a/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/tests/unit/gapic/compute_v1/test_interconnects.py @@ -606,8 +606,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -707,8 +708,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteInterconnectRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -840,8 +842,9 @@ def test_delete_rest_flattened(): # Wrap the value into a 
proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -928,8 +931,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1009,8 +1013,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1142,8 +1147,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1231,8 +1237,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Interconnect.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Interconnect.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1331,8 +1338,9 @@ def test_get_rest_required_fields(request_type=compute.GetInterconnectRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Interconnect.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Interconnect.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1462,8 +1470,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Interconnect.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Interconnect.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1527,8 +1536,9 @@ def test_get_diagnostics_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1606,10 +1616,9 @@ def test_get_diagnostics_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1743,8 +1752,9 @@ def test_get_diagnostics_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1849,6 +1859,75 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInterconnectRequest.meta.fields["interconnect_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["interconnect_resource"][field])): + del request_init["interconnect_resource"][field][i][subfield] + else: + del request_init["interconnect_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1882,8 +1961,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1980,8 +2060,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertInterconnectRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2075,57 +2156,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["interconnect_resource"] = { - "admin_enabled": True, - "circuit_infos": [ - { - "customer_demarc_id": "customer_demarc_id_value", - "google_circuit_id": "google_circuit_id_value", - "google_demarc_id": "google_demarc_id_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "customer_name": "customer_name_value", - "description": "description_value", - "expected_outages": [ - { - "affected_circuits": [ - "affected_circuits_value1", - "affected_circuits_value2", - ], - "description": "description_value", - "end_time": 837, - "issue_type": "issue_type_value", - "name": "name_value", - "source": "source_value", - "start_time": 1084, - "state": "state_value", - } - ], - "google_ip_address": "google_ip_address_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect_attachments": [ - "interconnect_attachments_value1", - "interconnect_attachments_value2", - ], - "interconnect_type": "interconnect_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "link_type": "link_type_value", - "location": "location_value", - "name": "name_value", - "noc_contact_email": "noc_contact_email_value", - "operational_status": "operational_status_value", - "peer_ip_address": "peer_ip_address_value", - "provisioned_link_count": 2375, - "remote_location": "remote_location_value", - "requested_link_count": 2151, - "satisfies_pzs": True, - "self_link": "self_link_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
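Editor's note: the other change these hunks repeat is cosmetic; the wrapped protobuf message is rebound to return_value instead of a separate pb_return_value before serialization, with no behavior change. A minimal illustration of the pattern, assuming google-cloud-compute and protobuf are installed (the field values are illustrative only):

from google.protobuf import json_format
from google.cloud.compute_v1.types import compute

# Build a proto-plus response, then unwrap it to its underlying protobuf message.
return_value = compute.Operation(name="operation-1", status=compute.Operation.Status.DONE)
# Convert return value to protobuf type (rebinding the same name, as the updated tests do)
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
# json_return_value now holds the JSON payload the mocked HTTP response will carry.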
@@ -2164,8 +2194,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2270,6 +2301,75 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInterconnectRequest.meta.fields["interconnect_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["interconnect_resource"][field])): + del request_init["interconnect_resource"][field][i][subfield] + else: + del request_init["interconnect_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2303,8 +2403,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2381,8 +2482,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2476,57 +2578,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["interconnect_resource"] = { - "admin_enabled": True, - "circuit_infos": [ - { - "customer_demarc_id": "customer_demarc_id_value", - "google_circuit_id": "google_circuit_id_value", - "google_demarc_id": "google_demarc_id_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "customer_name": "customer_name_value", - "description": "description_value", - "expected_outages": [ - { - "affected_circuits": [ - "affected_circuits_value1", - "affected_circuits_value2", - ], - "description": "description_value", - "end_time": 837, - "issue_type": "issue_type_value", - "name": "name_value", - "source": "source_value", - "start_time": 1084, - "state": "state_value", - } - ], - "google_ip_address": "google_ip_address_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect_attachments": [ - "interconnect_attachments_value1", - "interconnect_attachments_value2", - ], - "interconnect_type": "interconnect_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "link_type": "link_type_value", - "location": "location_value", - "name": "name_value", - "noc_contact_email": "noc_contact_email_value", - "operational_status": "operational_status_value", - "peer_ip_address": "peer_ip_address_value", - "provisioned_link_count": 2375, - "remote_location": "remote_location_value", - "requested_link_count": 2151, - "satisfies_pzs": True, - "self_link": "self_link_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2565,8 +2616,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2635,8 +2687,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2722,8 +2775,9 @@ def test_list_rest_required_fields(request_type=compute.ListInterconnectsRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2859,8 +2913,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InterconnectList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InterconnectList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3019,6 +3074,75 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInterconnectRequest.meta.fields["interconnect_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["interconnect_resource"][field])): + del request_init["interconnect_resource"][field][i][subfield] + else: + del request_init["interconnect_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
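Editor's note: for context, the response_value / req.return_value lines that recur in every hunk fake the REST layer by patching requests.Session.request and handing back a hand-built Response. A self-contained sketch of that mechanism; the URL and payload are illustrative only:

from unittest import mock
from requests import Response, Session

# Hand-build the canned HTTP response the transport will see.
response_value = Response()
response_value.status_code = 200
response_value._content = b'{"name": "operation-1"}'

with mock.patch.object(Session, "request") as req:
    req.return_value = response_value
    # Any request made through a Session now returns the fake response.
    resp = Session().request("GET", "https://compute.googleapis.com/")
    assert resp.status_code == 200
    assert resp.json() == {"name": "operation-1"}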
@@ -3052,8 +3176,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3154,8 +3279,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchInterconnectReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3250,57 +3376,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "interconnect": "sample2"} - request_init["interconnect_resource"] = { - "admin_enabled": True, - "circuit_infos": [ - { - "customer_demarc_id": "customer_demarc_id_value", - "google_circuit_id": "google_circuit_id_value", - "google_demarc_id": "google_demarc_id_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "customer_name": "customer_name_value", - "description": "description_value", - "expected_outages": [ - { - "affected_circuits": [ - "affected_circuits_value1", - "affected_circuits_value2", - ], - "description": "description_value", - "end_time": 837, - "issue_type": "issue_type_value", - "name": "name_value", - "source": "source_value", - "start_time": 1084, - "state": "state_value", - } - ], - "google_ip_address": "google_ip_address_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect_attachments": [ - "interconnect_attachments_value1", - "interconnect_attachments_value2", - ], - "interconnect_type": "interconnect_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "link_type": "link_type_value", - "location": "location_value", - "name": "name_value", - "noc_contact_email": "noc_contact_email_value", - "operational_status": "operational_status_value", - "peer_ip_address": "peer_ip_address_value", - "provisioned_link_count": 2375, - "remote_location": "remote_location_value", - "requested_link_count": 2151, - "satisfies_pzs": True, - "self_link": "self_link_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3340,8 +3415,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3447,6 +3523,75 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInterconnectRequest.meta.fields["interconnect_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "interconnect_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["interconnect_resource"][field])): + del request_init["interconnect_resource"][field][i][subfield] + else: + del request_init["interconnect_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3480,8 +3625,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3562,8 +3708,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3658,57 +3805,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "interconnect": "sample2"} - request_init["interconnect_resource"] = { - "admin_enabled": True, - "circuit_infos": [ - { - "customer_demarc_id": "customer_demarc_id_value", - "google_circuit_id": "google_circuit_id_value", - "google_demarc_id": "google_demarc_id_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "customer_name": "customer_name_value", - "description": "description_value", - "expected_outages": [ - { - "affected_circuits": [ - "affected_circuits_value1", - "affected_circuits_value2", - ], - "description": "description_value", - "end_time": 837, - "issue_type": "issue_type_value", - "name": "name_value", - "source": "source_value", - "start_time": 1084, - "state": "state_value", - } - ], - "google_ip_address": "google_ip_address_value", - "google_reference_id": "google_reference_id_value", - "id": 205, - "interconnect_attachments": [ - "interconnect_attachments_value1", - "interconnect_attachments_value2", - ], - "interconnect_type": "interconnect_type_value", - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "link_type": "link_type_value", - "location": "location_value", - "name": "name_value", - "noc_contact_email": "noc_contact_email_value", - "operational_status": "operational_status_value", - "peer_ip_address": "peer_ip_address_value", - "provisioned_link_count": 2375, - "remote_location": "remote_location_value", - "requested_link_count": 2151, - "satisfies_pzs": True, - "self_link": "self_link_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3748,8 +3844,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3808,6 +3905,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInterconnectRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. @@ -3841,8 +4013,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3943,8 +4116,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4039,10 +4213,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4084,8 +4254,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4146,6 +4317,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsInterconnectRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
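Editor's note: the meta.fields lookup that anchors each pruning block is the proto-plus field registry on the request class. A quick illustration, assuming google-cloud-compute; the printed descriptions are informal:

from google.cloud.compute_v1.types import compute

# Each proto-plus message class maps field names to Field objects via .meta.fields.
field = compute.SetLabelsInterconnectRequest.meta.fields[
    "global_set_labels_request_resource"
]
print(field.name)     # "global_set_labels_request_resource"
print(field.message)  # the message type backing this composite field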
@@ -4179,8 +4425,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4259,8 +4506,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4355,10 +4603,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4400,8 +4644,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_license_codes.py b/tests/unit/gapic/compute_v1/test_license_codes.py index e9a940e4..f41d8c8f 100644 --- a/tests/unit/gapic/compute_v1/test_license_codes.py +++ b/tests/unit/gapic/compute_v1/test_license_codes.py @@ -575,8 +575,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.LicenseCode.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -660,8 +661,9 @@ def test_get_rest_required_fields(request_type=compute.GetLicenseCodeRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.LicenseCode.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -791,8 +793,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.LicenseCode.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -849,6 +852,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsLicenseCodeRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
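Editor's note: for a body like test_permissions_request_resource above, whose only field is a repeated string, the pruning loop is effectively a no-op, since a list of plain strings yields no dict items to inspect. A small check of that reasoning, using illustrative values:

request_init = {"test_permissions_request_resource": {"permissions": ["p1", "p2"]}}

value = request_init["test_permissions_request_resource"]["permissions"]
result = value[0] if isinstance(value, list) and len(value) else None
# A str has no .keys(), so no (field, subfield) pairs are queued for deletion.
assert not (result and hasattr(result, "keys"))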
@@ -861,8 +939,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -942,8 +1021,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1040,9 +1120,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1084,8 +1161,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_licenses.py b/tests/unit/gapic/compute_v1/test_licenses.py index 16119912..aa17b014 100644 --- a/tests/unit/gapic/compute_v1/test_licenses.py +++ b/tests/unit/gapic/compute_v1/test_licenses.py @@ -589,8 +589,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -698,8 +699,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteLicenseRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -837,8 +839,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 
- pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -925,8 +928,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1012,8 +1016,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteLicenseReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1151,8 +1156,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1226,8 +1232,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.License.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1317,8 +1324,9 @@ def test_get_rest_required_fields(request_type=compute.GetLicenseRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.License.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1451,8 +1459,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.License.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1520,8 +1529,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1604,8 +1614,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1735,8 +1746,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1802,6 +1814,73 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "transferable": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertLicenseRequest.meta.fields["license_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["license_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["license_resource"][field])): + del request_init["license_resource"][field][i][subfield] + else: + del request_init["license_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
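Every hunk in this change that touches a mocked REST response follows the same pattern: build the expected result as a proto-plus message, convert it to its raw protobuf form with `.pb()`, serialize it with `json_format.MessageToJson`, and plant the bytes on a fake `requests.Response`. A condensed sketch of that pattern follows; the import path and the empty `Operation` payload are assumptions for illustration, not code copied from the tests.

```python
from google.protobuf import json_format
from requests import Response

from google.cloud import compute_v1 as compute  # assumed import path

# Expected result, built as a proto-plus message.
return_value = compute.Operation()

# Wrap the value into a proper Response obj, as the updated hunks do.
response_value = Response()
response_value.status_code = 200
# Convert return value to protobuf type before JSON serialization.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
# In the tests this canned response is handed to the patched session:
#     req.return_value = response_value
```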
@@ -1835,8 +1914,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1933,8 +2013,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertLicenseRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2024,18 +2105,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["license_resource"] = { - "charges_use_fee": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "license_code": 1245, - "name": "name_value", - "resource_requirements": {"min_guest_cpu_count": 2042, "min_memory_mb": 1386}, - "self_link": "self_link_value", - "transferable": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2074,8 +2143,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2140,6 +2210,73 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "transferable": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertLicenseRequest.meta.fields["license_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["license_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["license_resource"][field])): + del request_init["license_resource"][field][i][subfield] + else: + del request_init["license_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
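The `get_message_fields` helper repeated in these preambles decides whether a composite field comes from proto-plus (inspect `.meta.fields`) or from raw protobuf (inspect `.DESCRIPTOR.fields`). The compact restatement below shows how the `(field, subfield)` pairs are derived for `license_resource`; it assumes `google-cloud-compute` is importable and only prints a few pairs for inspection.

```python
from google.cloud import compute_v1 as compute  # assumed import path

license_field = compute.InsertLicenseRequest.meta.fields["license_resource"]

def composite_fields(field):
    """Sub-fields of a composite (message-typed) field, or [] for scalars."""
    if not (hasattr(field, "message") and field.message):
        return []
    if hasattr(field.message, "DESCRIPTOR"):          # raw protobuf message type
        return list(field.message.DESCRIPTOR.fields)
    return list(field.message.meta.fields.values())   # proto-plus message type

runtime_nested_fields = [
    (outer.name, nested.name)
    for outer in composite_fields(license_field)
    for nested in composite_fields(outer)
]
print(runtime_nested_fields[:5])
```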
@@ -2173,8 +2310,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2249,8 +2387,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertLicenseReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2340,18 +2479,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["license_resource"] = { - "charges_use_fee": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "license_code": 1245, - "name": "name_value", - "resource_requirements": {"min_guest_cpu_count": 2042, "min_memory_mb": 1386}, - "self_link": "self_link_value", - "transferable": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2390,8 +2517,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2458,8 +2586,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.LicensesListResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicensesListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2544,8 +2673,9 @@ def test_list_rest_required_fields(request_type=compute.ListLicensesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.LicensesListResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicensesListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2677,8 +2807,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.LicensesListResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.LicensesListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2862,6 +2993,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyLicenseRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
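The deletion loop at the end of each preamble has two shapes: a stale subfield must be removed from every element of a repeated field, but only once from a singular one. A dependency-free illustration (field names are made up):

```python
request_init = {
    "bindings": [                       # repeated message field
        {"role": "role_value", "stale": 1},
        {"role": "other_role", "stale": 2},
    ],
    "policy": {"etag": "etag_value", "stale": 3},   # singular message field
}

def drop_subfield(container, field, subfield, is_repeated):
    if is_repeated:
        for item in container[field]:
            item.pop(subfield, None)
    else:
        container[field].pop(subfield, None)

drop_subfield(request_init, "bindings", "stale", is_repeated=True)
drop_subfield(request_init, "policy", "stale", is_repeated=False)

assert request_init == {
    "bindings": [{"role": "role_value"}, {"role": "other_role"}],
    "policy": {"etag": "etag_value"},
}
```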
@@ -2876,8 +3082,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2959,8 +3166,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3053,83 +3261,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
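The large deletion above follows from the same runtime-version concern: the `*_rest_bad_request` tests now build the request from path parameters alone. The body content is irrelevant to these tests (the patched session returns an error regardless), so dropping the hard-coded resource dict avoids the runtime-version pitfalls without needing the pruning preamble. As a quick sanity check (assuming `google-cloud-compute` is installed), the trimmed `request_init` still constructs a valid request:

```python
from google.cloud import compute_v1 as compute  # assumed import path

request_init = {"project": "sample1", "resource": "sample2"}
request = compute.SetIamPolicyLicenseRequest(**request_init)
assert request.project == "sample1"
assert request.resource == "sample2"
```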
@@ -3171,8 +3302,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3232,6 +3364,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsLicenseRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock 
the http request call within the method and fake a response. @@ -3244,8 +3451,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3325,8 +3533,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3421,9 +3630,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3465,8 +3671,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_machine_images.py b/tests/unit/gapic/compute_v1/test_machine_images.py index b0e03458..7b7a2a85 100644 --- a/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/tests/unit/gapic/compute_v1/test_machine_images.py @@ -606,8 +606,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -707,8 +708,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteMachineImageRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -840,8 +842,9 @@ def test_delete_rest_flattened(): # Wrap the value 
into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -928,8 +931,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1009,8 +1013,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1142,8 +1147,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1220,8 +1226,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImage.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1309,8 +1316,9 @@ def test_get_rest_required_fields(request_type=compute.GetMachineImageRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImage.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1440,8 +1448,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImage.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1509,8 +1518,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1593,8 +1603,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1726,8 +1737,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2008,6 +2020,75 @@ def test_insert_rest(request_type): "storage_locations": ["storage_locations_value1", "storage_locations_value2"], "total_storage_bytes": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertMachineImageRequest.meta.fields["machine_image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "machine_image_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["machine_image_resource"][field])): + del request_init["machine_image_resource"][field][i][subfield] + else: + del request_init["machine_image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
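The reason the preamble matters is the `request_type(**request_init)` call that immediately follows it: proto-plus can reject keys that the installed protobuf definitions do not define. A coarser, top-level-only variant of the same guard is sketched below for illustration; the stale key is hypothetical.

```python
from google.cloud import compute_v1 as compute  # assumed import path

sample_machine_image = {
    "name": "name_value",
    "description": "description_value",
    "hypothetical_newer_field": True,   # not defined by the runtime dependency
}
# Keep only top-level keys the runtime MachineImage message defines; the
# generated preamble does the same thing one level deeper, for nested subfields.
known = set(compute.MachineImage.meta.fields)
pruned = {k: v for k, v in sample_machine_image.items() if k in known}

request = compute.InsertMachineImageRequest(
    project="sample1",
    machine_image_resource=pruned,
)
assert request.machine_image_resource.name == "name_value"
```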
@@ -2041,8 +2122,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2144,8 +2226,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertMachineImageRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2244,233 +2327,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["machine_image_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "guest_flush": True, - "id": 205, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - 
"machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "kind": "kind_value", - "machine_image_encryption_key": {}, - "name": "name_value", - "satisfies_pzs": True, - "saved_disks": [ - { - "architecture": "architecture_value", - "kind": "kind_value", - "source_disk": "source_disk_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - } - ], - "self_link": "self_link_value", - "source_disk_encryption_keys": [ - {"disk_encryption_key": {}, "source_disk": "source_disk_value"} - ], - "source_instance": "source_instance_value", - "source_instance_properties": { - "can_ip_forward": True, - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": {}, - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "guest_os_features": {}, - "index": 536, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": 
"mode_value", - "source": "source_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "type_": "type__value", - } - ], - "guest_accelerators": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": "machine_type_value", - "metadata": {}, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": {}, - "scheduling": {}, - "service_accounts": {}, - "tags": {}, - }, - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - "total_storage_bytes": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2511,8 +2367,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2795,6 +2652,75 @@ def test_insert_unary_rest(request_type): "storage_locations": ["storage_locations_value1", "storage_locations_value2"], "total_storage_bytes": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertMachineImageRequest.meta.fields["machine_image_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "machine_image_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["machine_image_resource"][field])): + del request_init["machine_image_resource"][field][i][subfield] + else: + del request_init["machine_image_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2828,8 +2754,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2897,347 +2824,121 @@ def test_insert_unary_rest_required_fields( # for required fields will fail the real version if the http_options # expect actual values for those fields. with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.insert_unary(request) - - expected_params = [] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_insert_unary_rest_unset_required_fields(): - transport = transports.MachineImagesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.insert._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "sourceInstance", - ) - ) - & set( - ( - "machineImageResource", - "project", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_unary_rest_interceptors(null_interceptor): - transport = transports.MachineImagesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MachineImagesRestInterceptor(), - ) - client = MachineImagesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.MachineImagesRestInterceptor, "post_insert" - ) as post, mock.patch.object( - transports.MachineImagesRestInterceptor, "pre_insert" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.InsertMachineImageRequest.pb( - compute.InsertMachineImageRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.InsertMachineImageRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.insert_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_insert_unary_rest_bad_request( - transport: str = "rest", request_type=compute.InsertMachineImageRequest -): - client = MachineImagesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1"} - request_init["machine_image_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "guest_flush": True, - "id": 205, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - 
"disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": 
{"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "kind": "kind_value", - "machine_image_encryption_key": {}, - "name": "name_value", - "satisfies_pzs": True, - "saved_disks": [ - { - "architecture": "architecture_value", - "kind": "kind_value", - "source_disk": "source_disk_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - ], - "self_link": "self_link_value", - "source_disk_encryption_keys": [ - {"disk_encryption_key": {}, "source_disk": "source_disk_value"} - ], - "source_instance": "source_instance_value", - "source_instance_properties": { - "can_ip_forward": True, - "deletion_protection": True, - "description": "description_value", - "disks": [ - { - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": {}, - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "guest_os_features": {}, - "index": 536, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "source": "source_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "type_": "type__value", - } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sourceInstance", + ) + ) + & set( + ( + "machineImageResource", + "project", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertMachineImageRequest.pb( + compute.InsertMachineImageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertMachineImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], - "guest_accelerators": {}, - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": "machine_type_value", - "metadata": {}, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": {}, - "scheduling": {}, - "service_accounts": {}, - "tags": {}, - }, - "status": "status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - "total_storage_bytes": 2046, - } + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", request_type=compute.InsertMachineImageRequest +): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3278,8 +2979,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3350,8 +3052,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3437,8 +3140,9 @@ def test_list_rest_required_fields(request_type=compute.ListMachineImagesRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3574,8 +3278,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineImageList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3760,6 +3465,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyMachineImageRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
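The block added above is the generated guard for googleapis/gapic-generator-python#1748: the sample request dict may carry nested keys that the protobuf/proto-plus dependency installed at test time no longer defines, so those keys are stripped before the request object is built. A condensed, hypothetical illustration of the same pruning (the field names and the `runtime_nested_fields` list are made up, and unlike the generated code this version walks every list element rather than only the first):

# Hypothetical sample body and the (field, subfield) pairs known at runtime.
resource = {"bindings": [{"role": "role_value", "retired_subfield": 1}]}
runtime_nested_fields = [("bindings", "role")]

for field, value in resource.items():
    items = value if isinstance(value, list) else [value]
    for item in items:
        if isinstance(item, dict):
            for subfield in list(item):
                if (field, subfield) not in runtime_nested_fields:
                    # Drop keys the installed dependency does not know about.
                    del item[subfield]

assert resource == {"bindings": [{"role": "role_value"}]}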
@@ -3774,8 +3554,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3857,8 +3638,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3953,83 +3735,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
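The recurring change in these hunks is purely mechanical: instead of keeping a separate `pb_return_value`, the proto-plus `return_value` is converted in place to its underlying protobuf message before JSON serialization. A minimal sketch of that conversion, assuming the same `compute` types module these tests import:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Proto-plus wrapper message, as returned by the generated client surface.
return_value = compute.Policy(etag="etag_value", version=774)

# `.pb()` unwraps the raw protobuf message; json_format only accepts that form.
return_value = compute.Policy.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

Rebinding `return_value` rather than introducing `pb_return_value` keeps the mocked-response setup identical across every test touched by this diff.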
@@ -4071,8 +3776,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4132,6 +3838,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsMachineImageRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # 
Mock the http request call within the method and fake a response. @@ -4144,8 +3925,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4225,8 +4007,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4323,9 +4106,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4367,8 +4147,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_machine_types.py b/tests/unit/gapic/compute_v1/test_machine_types.py index 56b0d7ad..08ac5cf4 100644 --- a/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/tests/unit/gapic/compute_v1/test_machine_types.py @@ -573,8 +573,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -664,8 +665,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -802,8 +804,9 @@ def 
test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -947,8 +950,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1041,8 +1045,9 @@ def test_get_rest_required_fields(request_type=compute.GetMachineTypeRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1178,8 +1183,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1249,8 +1255,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1340,8 +1347,9 @@ def test_list_rest_required_fields(request_type=compute.ListMachineTypesRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1483,8 +1491,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.MachineTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to 
protobuf type + return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_network_attachments.py b/tests/unit/gapic/compute_v1/test_network_attachments.py index 545fbec4..4cae460b 100644 --- a/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -608,8 +608,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -698,8 +699,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -837,8 +839,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -997,8 +1000,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1104,8 +1108,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1247,8 +1252,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to 
protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1340,8 +1346,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1425,8 +1432,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1568,8 +1576,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1653,8 +1662,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1748,8 +1758,9 @@ def test_get_rest_required_fields(request_type=compute.GetNetworkAttachmentReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1893,8 +1904,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1963,8 +1975,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj 
response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2051,8 +2064,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2190,8 +2204,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2280,6 +2295,79 @@ def test_insert_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkAttachmentRequest.meta.fields[ + "network_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_attachment_resource"][field]) + ): + del request_init["network_attachment_resource"][field][i][subfield] + else: + del request_init["network_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2313,8 +2401,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2417,8 +2506,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2513,40 +2603,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_attachment_resource"] = { - "connection_endpoints": [ - { - "ip_address": "ip_address_value", - "project_id_or_num": "project_id_or_num_value", - "secondary_ip_cidr_ranges": [ - "secondary_ip_cidr_ranges_value1", - "secondary_ip_cidr_ranges_value2", - ], - "status": "status_value", - "subnetwork": "subnetwork_value", - } - ], - "connection_preference": "connection_preference_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "producer_accept_lists": [ - "producer_accept_lists_value1", - "producer_accept_lists_value2", - ], - "producer_reject_lists": [ - "producer_reject_lists_value1", - "producer_reject_lists_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2592,8 +2648,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2688,6 +2745,79 @@ def test_insert_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkAttachmentRequest.meta.fields[ + "network_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_attachment_resource"][field]) + ): + del request_init["network_attachment_resource"][field][i][subfield] + else: + del request_init["network_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2721,8 +2851,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2803,8 +2934,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2899,40 +3031,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_attachment_resource"] = { - "connection_endpoints": [ - { - "ip_address": "ip_address_value", - "project_id_or_num": "project_id_or_num_value", - "secondary_ip_cidr_ranges": [ - "secondary_ip_cidr_ranges_value1", - "secondary_ip_cidr_ranges_value2", - ], - "status": "status_value", - "subnetwork": "subnetwork_value", - } - ], - "connection_preference": "connection_preference_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "producer_accept_lists": [ - "producer_accept_lists_value1", - "producer_accept_lists_value2", - ], - "producer_reject_lists": [ - "producer_reject_lists_value1", - "producer_reject_lists_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
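All of these REST tests stub the HTTP layer the same way: the transport session's `request` method is patched, and the canned payload is smuggled in through `requests.Response._content`. A self-contained sketch of that pattern outside pytest, assuming the client and transport layout used in this file (the session is mocked, so no network access occurs):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.services.network_attachments import (
    NetworkAttachmentsClient,
)
from google.cloud.compute_v1.types import compute

client = NetworkAttachmentsClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

# Canned payload: a proto-plus Operation serialized via its protobuf form.
return_value = compute.Operation(name="name_value")
json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))

# Wrap the value into a bare requests.Response, exactly as the tests do.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")

# Patching the session's `request` makes every REST call return this payload.
with mock.patch.object(type(client.transport._session), "request") as req:
    req.return_value = response_value
    op = client.insert_unary(
        request=compute.InsertNetworkAttachmentRequest(
            project="sample1", region="sample2"
        )
    )
    assert op.name == "name_value"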
@@ -2978,8 +3076,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3055,8 +3154,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3146,8 +3246,9 @@ def test_list_rest_required_fields(request_type=compute.ListNetworkAttachmentsRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3289,8 +3390,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3476,6 +3578,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyNetworkAttachmentRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3490,8 +3667,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3577,8 +3755,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3674,83 +3853,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
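The `get_message_fields` helper repeated in each of these tests exists only to answer one question: is the field's message type a proto-plus wrapper (which exposes `.meta.fields`) or a raw `*_pb2` class (which exposes `.DESCRIPTOR.fields`)? A rough standalone check, under the same assumption that `compute` is the proto-plus types module imported in this file:

from google.cloud.compute_v1.types import compute

field = compute.SetIamPolicyNetworkAttachmentRequest.meta.fields[
    "region_set_policy_request_resource"
]

# Raw protobuf classes carry a DESCRIPTOR; proto-plus classes carry .meta.
if hasattr(field.message, "DESCRIPTOR"):
    names = [f.name for f in field.message.DESCRIPTOR.fields]
else:
    names = list(field.message.meta.fields)

print(sorted(names))  # field names of the nested request message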
@@ -3797,8 +3899,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3859,6 +3962,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsNetworkAttachmentRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3871,8 +4049,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3956,8 +4135,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4056,9 +4236,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4105,8 +4282,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_network_edge_security_services.py b/tests/unit/gapic/compute_v1/test_network_edge_security_services.py index a33b94fc..6b5fbe1a 100644 --- a/tests/unit/gapic/compute_v1/test_network_edge_security_services.py +++ b/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -626,10 +626,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -720,10 +719,11 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -863,10 +863,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1026,8 +1025,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1138,8 +1138,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1282,8 +1283,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1375,8 +1377,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1465,8 +1468,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1609,8 +1613,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1690,8 +1695,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1788,8 +1794,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1933,8 +1940,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2001,6 +2009,84 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "self_link_with_id": "self_link_with_id_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkEdgeSecurityServiceRequest.meta.fields[ + "network_edge_security_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_edge_security_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["network_edge_security_service_resource"][field]), + ): + del request_init["network_edge_security_service_resource"][field][ + i + ][subfield] + else: + del request_init["network_edge_security_service_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
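The pruning block spliced in above is easier to follow outside the diff. A condensed, hypothetical illustration of what it does (the field names and the `runtime_nested_fields` value are invented for this example; in the real test they are computed from the message metadata as shown in the hunk):

# Toy version of the runtime-field pruning added to these tests: a sample
# request carries a nested subfield ("extra") that the runtime proto does
# not define, so it is deleted before the request object is constructed.
runtime_nested_fields = [("condition", "title")]  # (field, subfield) pairs present at runtime

request_init = {
    "resource": {
        "condition": {"title": "title_value", "extra": "not_in_runtime"},
    }
}

for field, value in request_init["resource"].items():
    # Repeated fields are checked via their first element, as in the hunk.
    target = value[0] if isinstance(value, list) and value else value
    if isinstance(target, dict):
        for subfield in list(target.keys()):
            if (field, subfield) not in runtime_nested_fields:
                del target[subfield]

assert request_init == {"resource": {"condition": {"title": "title_value"}}}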
@@ -2034,8 +2120,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2143,8 +2230,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2245,18 +2333,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_edge_security_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "security_policy": "security_policy_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2298,8 +2374,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2368,6 +2445,84 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "self_link_with_id": "self_link_with_id_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkEdgeSecurityServiceRequest.meta.fields[ + "network_edge_security_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_edge_security_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["network_edge_security_service_resource"][field]), + ): + del request_init["network_edge_security_service_resource"][field][ + i + ][subfield] + else: + del request_init["network_edge_security_service_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2401,8 +2556,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2488,8 +2644,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2590,18 +2747,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_edge_security_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "security_policy": "security_policy_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2643,8 +2788,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2717,6 +2863,84 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "self_link_with_id": "self_link_with_id_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkEdgeSecurityServiceRequest.meta.fields[ + "network_edge_security_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_edge_security_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["network_edge_security_service_resource"][field]), + ): + del request_init["network_edge_security_service_resource"][field][ + i + ][subfield] + else: + del request_init["network_edge_security_service_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
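For reference, the `get_message_fields` helper repeated in these hunks relies on proto-plus exposing field metadata through `.meta.fields`, while plain protobuf messages expose it through `.DESCRIPTOR.fields`. A small sketch of that introspection, using a request type that appears above (the printed names are examples only):

from google.cloud.compute_v1.types import compute

# Look up the resource field on the proto-plus request type, as the test does.
field = compute.InsertNetworkEdgeSecurityServiceRequest.meta.fields[
    "network_edge_security_service_resource"
]

# Plain protobuf message types carry a DESCRIPTOR; proto-plus types do not,
# which is exactly the branch get_message_fields takes above.
if hasattr(field.message, "DESCRIPTOR"):
    nested = [f.name for f in field.message.DESCRIPTOR.fields]
else:
    nested = [f.name for f in field.message.meta.fields.values()]

print(nested)  # e.g. ["creation_timestamp", "description", "fingerprint", ...]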
@@ -2750,8 +2974,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2869,8 +3094,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2976,18 +3202,6 @@ def test_patch_rest_bad_request( "region": "sample2", "network_edge_security_service": "sample3", } - request_init["network_edge_security_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "security_policy": "security_policy_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3034,8 +3248,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3109,6 +3324,84 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "self_link_with_id": "self_link_with_id_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkEdgeSecurityServiceRequest.meta.fields[ + "network_edge_security_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_edge_security_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["network_edge_security_service_resource"][field]), + ): + del request_init["network_edge_security_service_resource"][field][ + i + ][subfield] + else: + del request_init["network_edge_security_service_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3142,8 +3435,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3239,8 +3533,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3346,18 +3641,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "network_edge_security_service": "sample3", } - request_init["network_edge_security_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "security_policy": "security_policy_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3404,8 +3687,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index 17121fcc..6474bae9 100644 --- a/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -617,8 +617,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -708,10 +709,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -849,8 +849,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -987,6 +988,88 @@ def test_attach_network_endpoints_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_groups_attach_endpoints_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + 
"network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -1020,8 +1103,9 @@ def test_attach_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1128,8 +1212,9 @@ def test_attach_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1230,17 +1315,6 @@ def test_attach_network_endpoints_rest_bad_request( "zone": "sample2", "network_endpoint_group": "sample3", } - request_init["network_endpoint_groups_attach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1289,8 +1363,9 @@ def test_attach_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1365,6 +1440,88 @@ def test_attach_network_endpoints_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_groups_attach_endpoints_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1398,8 +1555,9 @@ def test_attach_network_endpoints_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1484,8 +1642,9 @@ def test_attach_network_endpoints_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1586,17 +1745,6 @@ def test_attach_network_endpoints_unary_rest_bad_request( "zone": "sample2", "network_endpoint_group": "sample3", } - request_init["network_endpoint_groups_attach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1645,8 +1793,9 @@ def test_attach_network_endpoints_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1743,8 +1892,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1850,8 +2000,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1993,8 +2144,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2086,8 +2238,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2171,8 +2324,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2314,8 +2468,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2385,6 +2540,88 @@ def test_detach_network_endpoints_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_groups_detach_endpoints_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2418,8 +2655,9 @@ def test_detach_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2526,8 +2764,9 @@ def test_detach_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2628,17 +2867,6 @@ def test_detach_network_endpoints_rest_bad_request( "zone": "sample2", "network_endpoint_group": "sample3", } - request_init["network_endpoint_groups_detach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2687,8 +2915,9 @@ def test_detach_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2763,6 +2992,88 @@ def test_detach_network_endpoints_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_groups_detach_endpoints_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2796,8 +3107,9 @@ def test_detach_network_endpoints_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2882,8 +3194,9 @@ def test_detach_network_endpoints_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2984,17 +3297,6 @@ def test_detach_network_endpoints_unary_rest_bad_request( "zone": "sample2", "network_endpoint_group": "sample3", } - request_init["network_endpoint_groups_detach_endpoints_request_resource"] = { - "network_endpoints": [ - { - "annotations": {}, - "fqdn": "fqdn_value", - "instance": "instance_value", - "ip_address": "ip_address_value", - "port": 453, - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3043,8 +3345,9 @@ def test_detach_network_endpoints_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3133,8 +3436,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3228,8 +3532,9 @@ def test_get_rest_required_fields(request_type=compute.GetNetworkEndpointGroupRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3373,8 +3678,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to 
protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3462,6 +3768,81 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
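The field-pruning block added above can be read as the following condensed, illustrative sketch: given the request class and the name of its resource field, drop any nested keys of the sample request that the installed google-cloud-compute release does not define. `get_message_fields` mirrors the helper in the diff; `prune_unknown_subfields` is a name introduced here only for illustration and is not part of the generated tests.

def get_message_fields(field):
    # Return the sub-fields of a message-typed field, or [] for scalar fields.
    # proto-plus wrappers expose `meta.fields`; vanilla protobuf messages
    # expose `DESCRIPTOR.fields`.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):  # proto-plus type
            return list(field.message.meta.fields.values())
        return list(field.message.DESCRIPTOR.fields)  # protobuf type
    return []

def prune_unknown_subfields(request_class, field_name, request_init):
    # Remove (field, subfield) pairs from request_init[field_name] that the
    # runtime version of the dependency does not know about, so that
    # request_type(**request_init) cannot fail on a stale sample value.
    test_field = request_class.meta.fields[field_name]
    runtime_nested_fields = {
        (field.name, nested.name)
        for field in get_message_fields(test_field)
        for nested in get_message_fields(field)
    }
    for field, value in request_init[field_name].items():
        items = value if isinstance(value, list) else [value]
        for item in items:
            if isinstance(item, dict):
                for subfield in list(item):
                    if (field, subfield) not in runtime_nested_fields:
                        item.pop(subfield)
    return request_init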
@@ -3495,8 +3876,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3599,8 +3981,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3695,39 +4078,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3769,8 +4119,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3860,6 +4211,81 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
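For example, running the illustrative helper sketched earlier against a trimmed version of this test's sample `network_endpoint_group_resource` (the key `hypothetical_new_subfield` is invented here to stand in for a field that only a newer generator release would emit):

from google.cloud.compute_v1.types import compute

request_init = {
    "network_endpoint_group_resource": {
        "name": "name_value",
        "app_engine": {
            "service": "service_value",
            "url_mask": "url_mask_value",
            "hypothetical_new_subfield": "unknown to the installed protos",
        },
    }
}
request_init = prune_unknown_subfields(
    compute.InsertNetworkEndpointGroupRequest,
    "network_endpoint_group_resource",
    request_init,
)
# ("app_engine", "hypothetical_new_subfield") is not a known (field, subfield)
# pair, so that key is dropped; "service" and "url_mask" are kept, and scalar
# top-level keys such as "name" are never touched by the pruning loop.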
@@ -3893,8 +4319,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3975,8 +4402,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4071,39 +4499,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
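The response-mocking hunks above all make the same change: the intermediate `pb_return_value` name is dropped and `return_value` is re-bound to the raw protobuf message before JSON serialization. Roughly, the resulting pattern looks like this sketch (the `name` value is illustrative):

from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute

return_value = compute.Operation(name="operation-sample")  # proto-plus wrapper
# Convert return value to protobuf type (the step the diff now comments explicitly)
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
# The mocked transport then hands back `response_value` via `req.return_value`.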
@@ -4145,8 +4540,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4218,8 +4614,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4311,8 +4708,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4454,8 +4852,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4571,6 +4970,88 @@ def test_list_network_endpoints_rest(request_type): request_init["network_endpoint_groups_list_endpoints_request_resource"] = { "health_status": "health_status_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.meta.fields[ + "network_endpoint_groups_list_endpoints_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4585,10 +5066,11 @@ def test_list_network_endpoints_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4684,10 +5166,11 @@ def test_list_network_endpoints_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4800,9 +5283,6 @@ def test_list_network_endpoints_rest_bad_request( "zone": "sample2", "network_endpoint_group": "sample3", } - request_init["network_endpoint_groups_list_endpoints_request_resource"] = { - "health_status": "health_status_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4849,10 +5329,11 @@ def test_list_network_endpoints_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4982,6 +5463,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsNetworkEndpointGroupRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
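The stale keys have to be removed before `request_type(**request_init)` because proto-plus constructors reject unknown fields outright, which is presumably also why the literal request-body dicts were dropped from the `*_bad_request` tests above. A minimal sketch of that failure mode (the exact error text may vary by proto-plus version, and `not_a_known_field` is invented for illustration):

from google.cloud.compute_v1.types import compute

try:
    compute.TestPermissionsRequest(
        permissions=["permissions_value1"],
        not_a_known_field="stale sample value",
    )
except ValueError as exc:
    # e.g. "Unknown field for TestPermissionsRequest: not_a_known_field"
    print(exc)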
@@ -4994,8 +5550,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5079,8 +5636,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5179,9 +5737,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5228,8 +5783,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_network_firewall_policies.py b/tests/unit/gapic/compute_v1/test_network_firewall_policies.py index ae765d86..6ff70bbf 100644 --- a/tests/unit/gapic/compute_v1/test_network_firewall_policies.py +++ b/tests/unit/gapic/compute_v1/test_network_firewall_policies.py @@ -610,6 +610,83 @@ def test_add_association_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -643,8 +720,9 @@ def test_add_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -752,8 +830,9 @@ def test_add_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -854,13 +933,6 @@ def test_add_association_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -902,8 +974,9 @@ def test_add_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -967,6 +1040,83 @@ def test_add_association_unary_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1000,8 +1150,9 @@ def test_add_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1087,8 +1238,9 @@ def test_add_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1189,13 +1341,6 @@ def test_add_association_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1237,8 +1382,9 @@ def test_add_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1346,6 +1492,81 @@ def test_add_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1379,8 +1600,9 @@ def test_add_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1489,8 +1711,9 @@ def test_add_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1591,57 +1814,6 @@ def test_add_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1683,8 +1855,9 @@ def test_add_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1792,6 +1965,81 @@ def test_add_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1825,8 +2073,9 @@ def test_add_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1913,8 +2162,9 @@ def test_add_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2015,57 +2265,6 @@ def test_add_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2107,8 +2306,9 @@ def test_add_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2198,8 +2398,9 @@ def test_clone_rules_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2306,8 +2507,9 @@ def test_clone_rules_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2444,8 +2646,9 @@ def test_clone_rules_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2532,8 +2735,9 @@ def test_clone_rules_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2618,8 +2822,9 @@ def test_clone_rules_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2756,8 +2961,9 @@ def test_clone_rules_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2844,8 +3050,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2947,8 +3154,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3080,8 +3288,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3168,8 +3377,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3249,8 +3459,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3382,8 +3593,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3461,8 +3673,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3551,8 +3764,9 @@ def test_get_rest_required_fields(request_type=compute.GetNetworkFirewallPolicyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3686,8 +3900,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3757,8 +3972,9 @@ def test_get_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3843,8 +4059,9 @@ def test_get_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3979,8 +4196,9 @@ def test_get_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4048,8 +4266,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -4132,8 +4351,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4266,8 +4486,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4343,8 +4564,9 @@ def test_get_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4435,8 +4657,9 @@ def test_get_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4570,8 +4793,9 @@ def test_get_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4711,6 +4935,77 @@ def test_insert_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
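# The pruning block above is easier to reason about in isolation. Below is a condensed,
# standalone sketch of the same idea; it is not the generated code itself, and the helper
# name `prune_unknown_subfields` and its `request_cls`/`field_name`/`sample` parameters are
# illustrative. Given a proto-plus request class and the sample dict for one of its message
# fields, it drops any second-level key that the runtime message type does not define.
def prune_unknown_subfields(request_cls, field_name, sample):
    def get_message_fields(field):
        # Message-typed fields expose their nested fields via proto-plus metadata
        # (.meta.fields) or, for vanilla protobuf types, via DESCRIPTOR.fields.
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):
                return list(field.message.meta.fields.values())
            return list(field.message.DESCRIPTOR.fields)
        return []

    top_field = request_cls.meta.fields[field_name]
    known_pairs = {
        (field.name, nested.name)
        for field in get_message_fields(top_field)
        for nested in get_message_fields(field)
    }
    for field, value in sample.items():
        # Use the first element of a repeated field as a representative of its shape.
        item = value[0] if isinstance(value, list) and value else value
        if isinstance(item, dict):
            for subfield in list(item.keys()):
                if (field, subfield) not in known_pairs:
                    for element in (value if isinstance(value, list) else [value]):
                        element.pop(subfield, None)
    return sample

# Usage would mirror the generated block, e.g.:
# prune_unknown_subfields(
#     compute.InsertNetworkFirewallPolicyRequest,
#     "firewall_policy_resource",
#     request_init["firewall_policy_resource"],
# )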
@@ -4744,8 +5039,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4844,8 +5140,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4939,92 +5236,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. @@ -5069,8 +5280,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5216,6 +5428,77 @@ def test_insert_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
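# The recurring `# Convert return value to protobuf type` hunks above rename
# `pb_return_value` but keep the essential step: json_format.MessageToJson only accepts raw
# protobuf messages, while the compute types are proto-plus wrappers, so `.pb()` must
# unwrap them first. A minimal sketch of the response-mocking pattern (the field value is
# illustrative):
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="name_value")

# Unwrap the proto-plus message into its underlying protobuf message, then serialize.
pb_message = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(pb_message)

# Feeding the JSON back through a fake requests.Response lets the REST transport
# deserialize it exactly as it would a real server reply.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")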
@@ -5249,8 +5532,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5327,8 +5611,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5422,92 +5707,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method 
and fake a BadRequest error. @@ -5552,8 +5751,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5627,8 +5827,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5715,8 +5916,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5852,8 +6054,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6047,6 +6250,77 @@ def test_patch_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
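# Why the deletions above matter at all: constructing a proto-plus message from a dict that
# contains a field unknown to the installed version of the generated types fails, so stale
# sample keys must be stripped before building the request. A hypothetical illustration
# (`field_only_in_newer_protos` is made up and stands in for such a stale key):
from google.cloud.compute_v1.types import compute

stale_sample = {"name": "name_value", "field_only_in_newer_protos": "value"}
try:
    compute.FirewallPolicy(stale_sample)
except (KeyError, ValueError):
    # The exact exception type depends on the proto-plus version in use; either way,
    # unknown keys make construction fail, which is exactly what the pruning avoids.
    pass

# With only known keys, construction succeeds.
assert compute.FirewallPolicy({"name": "name_value"}).name == "name_value"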
@@ -6080,8 +6354,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6184,8 +6459,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6280,92 +6556,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within 
the method and fake a BadRequest error. @@ -6411,8 +6601,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6559,6 +6750,77 @@ def test_patch_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. @@ -6592,8 +6854,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6674,8 +6937,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6740,122 +7004,36 @@ def test_patch_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchNetworkFirewallPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.patch_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchNetworkFirewallPolicyRequest -): - client = NetworkFirewallPoliciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - 
"src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } + request = compute.PatchNetworkFirewallPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchNetworkFirewallPolicyRequest +): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall_policy": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6901,8 +7079,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7014,6 +7193,81 @@ def test_patch_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
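# Note on the deletion loop above: only element [0] of a repeated field is inspected when
# deciding which subfields are unknown, but the loop then deletes that subfield from every
# element. That is safe here because the generated samples are homogeneous. A hypothetical
# illustration (the `associations` entries, the `stale` key and `known_pairs` are made up):
request_init_sample = {
    "associations": [
        {"name": "name_value1", "stale": "x"},
        {"name": "name_value2", "stale": "y"},
    ]
}
known_pairs = {("associations", "name")}  # pretend the runtime type only knows `name`

field = "associations"
representative = request_init_sample[field][0]
for subfield in list(representative.keys()):
    if (field, subfield) not in known_pairs:
        for i in range(0, len(request_init_sample[field])):
            del request_init_sample[field][i][subfield]

assert request_init_sample == {
    "associations": [{"name": "name_value1"}, {"name": "name_value2"}]
}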
@@ -7047,8 +7301,9 @@ def test_patch_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7156,8 +7411,9 @@ def test_patch_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7257,57 +7513,6 @@ def test_patch_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
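# The large dict removed above is not needed in the bad-request test: it only exercises the
# error path, and proto3 fields are optional, so a request built from the path parameters
# alone is valid. A small sketch (the sample IDs mirror the ones used in the test):
from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "firewall_policy": "sample2"}
request = compute.PatchRuleNetworkFirewallPolicyRequest(**request_init)

assert request.project == "sample1"
assert request.firewall_policy == "sample2"
# The omitted resource reads back as an empty message with default field values.
assert request.firewall_policy_rule_resource.action == ""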
@@ -7349,8 +7554,9 @@ def test_patch_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7458,6 +7664,81 @@ def test_patch_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7491,8 +7772,9 @@ def test_patch_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7578,8 +7860,9 @@ def test_patch_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7679,57 +7962,6 @@ def test_patch_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "firewall_policy": "sample2"} - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7771,8 +8003,9 @@ def test_patch_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7862,8 +8095,9 @@ def test_remove_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7970,8 +8204,9 @@ def test_remove_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8109,8 +8344,9 @@ def test_remove_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8197,8 +8433,9 @@ def test_remove_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8283,8 +8520,9 @@ def test_remove_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8422,8 +8660,9 @@ def test_remove_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8510,8 +8749,9 @@ def test_remove_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8618,8 +8858,9 @@ def test_remove_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8756,8 +8997,9 @@ def test_remove_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8844,8 +9086,9 @@ def test_remove_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8930,8 +9173,9 @@ def test_remove_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9068,8 +9312,9 @@ def test_remove_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9200,6 +9445,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyNetworkFirewallPolicyRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
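# The runtime_nested_fields check above is deliberately shallow: it only collects
# (field, subfield) pairs, so keys nested three or more levels deep (for example
# bindings[0]["condition"]["title"] in this policy sample) are never candidates for
# deletion. A small illustration of that two-level behaviour (`known_pairs` is hypothetical):
sample = {"bindings": [{"role": "role_value", "condition": {"title": "title_value"}}]}
known_pairs = {("bindings", "role"), ("bindings", "condition")}

for field, value in sample.items():
    item = value[0] if isinstance(value, list) and value else value
    if isinstance(item, dict):
        for subfield in list(item.keys()):
            if (field, subfield) not in known_pairs:
                del item[subfield]

# Nothing is removed: "condition" itself is a known subfield, and its own sub-keys sit
# below the depth the pruning inspects.
assert sample == {"bindings": [{"role": "role_value", "condition": {"title": "title_value"}}]}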
@@ -9214,8 +9534,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9297,8 +9618,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9394,83 +9716,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
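# A hedged, standalone sketch (not copied from this file) of how the *_rest_bad_request
# tests exercise the error path once the resource body is gone: a mocked HTTP 400 is
# enough, assuming the REST transport converts any >=400 response into the matching
# google.api_core exception (BadRequest here).
from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from requests import Request, Response
from requests.sessions import Session

client = compute_v1.NetworkFirewallPoliciesClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
request = compute_v1.SetIamPolicyNetworkFirewallPolicyRequest(
    project="sample1", resource="sample2"
)

with mock.patch.object(Session, "request") as req, pytest.raises(core_exceptions.BadRequest):
    response_value = Response()
    response_value.status_code = 400
    # The error mapper reads the originating request's method/URL for its message.
    response_value.request = Request()
    req.return_value = response_value
    client.set_iam_policy(request)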
@@ -9512,8 +9757,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9573,6 +9819,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsNetworkFirewallPolicyRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -9585,8 +9906,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9666,8 +9988,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9765,9 +10088,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9809,8 +10129,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_networks.py b/tests/unit/gapic/compute_v1/test_networks.py index 2f4603aa..7e24c3a8 100644 --- a/tests/unit/gapic/compute_v1/test_networks.py +++ b/tests/unit/gapic/compute_v1/test_networks.py @@ -575,6 +575,83 @@ def test_add_peering_rest(request_type): }, "peer_network": "peer_network_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddPeeringNetworkRequest.meta.fields[ + "networks_add_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_add_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["networks_add_peering_request_resource"][field]) + ): + del request_init["networks_add_peering_request_resource"][field][i][ + subfield + ] + else: + del request_init["networks_add_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
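The deletion loop above is easier to follow on a plain dict. A simplified sketch of the same pruning idea, with a hypothetical brand_new_field standing in for a key the installed library does not know about; the generated tests collect the stale pairs first and delete afterwards, while this version prunes in place.

# Pairs the runtime proto actually exposes, as built from get_message_fields().
runtime_nested_fields = {("network_peering", "name"), ("network_peering", "peer_mtu")}

request_resource = {
    "name": "peering-1",
    "network_peering": {"name": "n1", "peer_mtu": 1460, "brand_new_field": True},
}

for field, value in request_resource.items():
    is_repeated = isinstance(value, list) and len(value) > 0
    sample = value[0] if is_repeated else value
    if not isinstance(sample, dict):
        continue
    for subfield in list(sample.keys()):
        if (field, subfield) not in runtime_nested_fields:
            # Strip the unknown key from every element of a repeated field,
            # or from the single nested dict otherwise.
            for entry in (value if is_repeated else [value]):
                entry.pop(subfield, None)

print(request_resource)
# {'name': 'peering-1', 'network_peering': {'name': 'n1', 'peer_mtu': 1460}}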
@@ -608,8 +685,9 @@ def test_add_peering_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -712,8 +790,9 @@ def test_add_peering_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -806,25 +885,6 @@ def test_add_peering_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_add_peering_request_resource"] = { - "auto_create_routes": True, - "name": "name_value", - "network_peering": { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - }, - "peer_network": "peer_network_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -866,8 +926,9 @@ def test_add_peering_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -943,6 +1004,83 @@ def test_add_peering_unary_rest(request_type): }, "peer_network": "peer_network_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddPeeringNetworkRequest.meta.fields[ + "networks_add_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_add_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["networks_add_peering_request_resource"][field]) + ): + del request_init["networks_add_peering_request_resource"][field][i][ + subfield + ] + else: + del request_init["networks_add_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
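For the unary variant the preamble is identical; the interesting part is how the (field, subfield) allow-list is derived from runtime metadata rather than from the hand-written sample dict. A sketch against compute.AddPeeringNetworkRequest, reusing the condensed helper from the earlier note.

from google.cloud import compute_v1 as compute


def get_message_fields(field):
    # Condensed version of the generated helper shown earlier.
    if not (hasattr(field, "message") and field.message):
        return []
    if hasattr(field.message, "DESCRIPTOR"):
        return field.message.DESCRIPTOR.fields
    return list(field.message.meta.fields.values())


test_field = compute.AddPeeringNetworkRequest.meta.fields[
    "networks_add_peering_request_resource"
]

# (top-level field, nested field) pairs the installed proto knows about,
# e.g. ("network_peering", "peer_mtu"). Anything in the sample request that
# is not in this list gets deleted before the request object is built.
runtime_nested_fields = [
    (field.name, nested.name)
    for field in get_message_fields(test_field)
    for nested in get_message_fields(field)
]
print(runtime_nested_fields)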
@@ -976,8 +1114,9 @@ def test_add_peering_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1058,8 +1197,9 @@ def test_add_peering_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1152,25 +1292,6 @@ def test_add_peering_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_add_peering_request_resource"] = { - "auto_create_routes": True, - "name": "name_value", - "network_peering": { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - }, - "peer_network": "peer_network_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
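The bad-request tests drop the resource body entirely instead of gaining the same pruning preamble: the error path only needs the path parameters that satisfy URL transcoding, and an unpopulated body message simply stays at its default. A small illustration with sample values only.

from google.cloud import compute_v1 as compute

# Only the fields used for URL transcoding are set; the body message is left
# at its default, empty value.
request = compute.AddPeeringNetworkRequest(project="sample1", network="sample2")
assert request.networks_add_peering_request_resource.name == ""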
@@ -1212,8 +1333,9 @@ def test_add_peering_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1303,8 +1425,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1404,8 +1527,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteNetworkRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1533,8 +1657,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1621,8 +1746,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1700,8 +1826,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1829,8 +1956,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1911,8 +2039,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Network.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2007,8 +2136,9 @@ def test_get_rest_required_fields(request_type=compute.GetNetworkRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Network.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2136,8 +2266,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Network.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2201,8 +2332,9 @@ def test_get_effective_firewalls_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2280,10 +2412,11 @@ def test_get_effective_firewalls_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb( + # Convert return value to protobuf type + return_value = compute.NetworksGetEffectiveFirewallsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2417,8 +2550,9 @@ def test_get_effective_firewalls_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-2507,6 +2641,73 @@ def test_insert_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkRequest.meta.fields["network_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_resource"][field])): + del request_init["network_resource"][field][i][subfield] + else: + del request_init["network_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
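A quick way to see the version skew that issue 1748 describes is to compare the sample dict's keys against the field names the installed google-cloud-compute exposes for compute.Network. A sketch; brand_new_field is a hypothetical key standing in for a field added after this library version was generated.

from google.cloud import compute_v1 as compute

sample_keys = {"name", "mtu", "peerings", "routing_config", "brand_new_field"}

# meta.fields is keyed by field name for proto-plus messages.
runtime_keys = set(compute.Network.meta.fields.keys())

print(sorted(sample_keys - runtime_keys))  # keys the preamble would strip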
@@ -2540,8 +2741,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2638,8 +2840,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertNetworkRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2729,41 +2932,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["network_resource"] = { - "I_pv4_range": "I_pv4_range_value", - "auto_create_subnetworks": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_ula_internal_ipv6": True, - "firewall_policy": "firewall_policy_value", - "gateway_i_pv4": "gateway_i_pv4_value", - "id": 205, - "internal_ipv6_range": "internal_ipv6_range_value", - "kind": "kind_value", - "mtu": 342, - "name": "name_value", - "network_firewall_policy_enforcement_order": "network_firewall_policy_enforcement_order_value", - "peerings": [ - { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - ], - "routing_config": {"routing_mode": "routing_mode_value"}, - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2802,8 +2970,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2891,6 +3060,73 @@ def test_insert_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNetworkRequest.meta.fields["network_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_resource"][field])): + del request_init["network_resource"][field][i][subfield] + else: + del request_init["network_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2924,8 +3160,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3000,8 +3237,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3091,41 +3329,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["network_resource"] = { - "I_pv4_range": "I_pv4_range_value", - "auto_create_subnetworks": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_ula_internal_ipv6": True, - "firewall_policy": "firewall_policy_value", - "gateway_i_pv4": "gateway_i_pv4_value", - "id": 205, - "internal_ipv6_range": "internal_ipv6_range_value", - "kind": "kind_value", - "mtu": 342, - "name": "name_value", - "network_firewall_policy_enforcement_order": "network_firewall_policy_enforcement_order_value", - "peerings": [ - { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - ], - "routing_config": {"routing_mode": "routing_mode_value"}, - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
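These tests serialize the protobuf form because the REST layer round-trips payloads through json_format. The sketch below is an illustrative round-trip under that assumption, not a copy of the transport code; the ignore_unknown_fields flag and the use of wrap are my own additions to show how fields from a newer server can be tolerated.

from google.cloud import compute_v1 as compute
from google.protobuf import json_format

original = compute.Network(name="net-1", mtu=1460)
payload = json_format.MessageToJson(compute.Network.pb(original))

# Parse the JSON back into a fresh protobuf instance, tolerating fields a
# newer server might include, then re-wrap it as the proto-plus type.
parsed_pb = compute.Network.pb(compute.Network())
json_format.Parse(payload, parsed_pb, ignore_unknown_fields=True)
restored = compute.Network.wrap(parsed_pb)
assert restored.name == "net-1" and restored.mtu == 1460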
@@ -3164,8 +3367,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3233,8 +3437,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3320,8 +3525,9 @@ def test_list_rest_required_fields(request_type=compute.ListNetworksRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3451,8 +3657,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3574,8 +3781,9 @@ def test_list_peering_routes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExchangedPeeringRoutesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3670,8 +3878,9 @@ def test_list_peering_routes_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExchangedPeeringRoutesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3814,8 +4023,9 @@ def test_list_peering_routes_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ExchangedPeeringRoutesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3961,6 +4171,73 @@ def test_patch_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkRequest.meta.fields["network_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_resource"][field])): + del request_init["network_resource"][field][i][subfield] + else: + del request_init["network_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3994,8 +4271,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4096,8 +4374,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchNetworkRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4188,41 +4467,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["network_resource"] = { - "I_pv4_range": "I_pv4_range_value", - "auto_create_subnetworks": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_ula_internal_ipv6": True, - "firewall_policy": "firewall_policy_value", - "gateway_i_pv4": "gateway_i_pv4_value", - "id": 205, - "internal_ipv6_range": "internal_ipv6_range_value", - "kind": "kind_value", - "mtu": 342, - "name": "name_value", - "network_firewall_policy_enforcement_order": "network_firewall_policy_enforcement_order_value", - "peerings": [ - { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - ], - "routing_config": {"routing_mode": "routing_mode_value"}, - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4262,8 +4506,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4353,6 +4598,73 @@ def test_patch_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "subnetworks": ["subnetworks_value1", "subnetworks_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkRequest.meta.fields["network_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_resource"][field])): + del request_init["network_resource"][field][i][subfield] + else: + del request_init["network_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4386,8 +4698,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4466,8 +4779,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchNetworkReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4558,41 +4872,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["network_resource"] = { - "I_pv4_range": "I_pv4_range_value", - "auto_create_subnetworks": True, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_ula_internal_ipv6": True, - "firewall_policy": "firewall_policy_value", - "gateway_i_pv4": "gateway_i_pv4_value", - "id": 205, - "internal_ipv6_range": "internal_ipv6_range_value", - "kind": "kind_value", - "mtu": 342, - "name": "name_value", - "network_firewall_policy_enforcement_order": "network_firewall_policy_enforcement_order_value", - "peerings": [ - { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - ], - "routing_config": {"routing_mode": "routing_mode_value"}, - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "subnetworks": ["subnetworks_value1", "subnetworks_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4632,8 +4911,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4689,6 +4969,86 @@ def test_remove_peering_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} request_init["networks_remove_peering_request_resource"] = {"name": "name_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemovePeeringNetworkRequest.meta.fields[ + "networks_remove_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_remove_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["networks_remove_peering_request_resource"][field] + ), + ): + del request_init["networks_remove_peering_request_resource"][field][ + i + ][subfield] + else: + del request_init["networks_remove_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4722,8 +5082,9 @@ def test_remove_peering_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4826,8 +5187,9 @@ def test_remove_peering_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4920,7 +5282,6 @@ def test_remove_peering_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_remove_peering_request_resource"] = {"name": "name_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4962,8 +5323,9 @@ def test_remove_peering_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5021,6 +5383,86 @@ def test_remove_peering_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} request_init["networks_remove_peering_request_resource"] = {"name": "name_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemovePeeringNetworkRequest.meta.fields[ + "networks_remove_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_remove_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["networks_remove_peering_request_resource"][field] + ), + ): + del request_init["networks_remove_peering_request_resource"][field][ + i + ][subfield] + else: + del request_init["networks_remove_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
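All of these preambles protect the same final step, request_type(**request_init): proto-plus coerces nested dicts into the corresponding message types and rejects keys the runtime proto does not define, which is exactly what a stale sample field would trigger. A minimal sketch with placeholder values.

from google.cloud import compute_v1 as compute

request_init = {
    "project": "sample1",
    "network": "sample2",
    "networks_remove_peering_request_resource": {"name": "peer-1"},
}

# The nested dict is marshalled into the resource message; an unknown key in
# it would raise at construction time, which is why the preamble prunes first.
request = compute.RemovePeeringNetworkRequest(**request_init)
assert request.networks_remove_peering_request_resource.name == "peer-1"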
@@ -5054,8 +5496,9 @@ def test_remove_peering_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5136,8 +5579,9 @@ def test_remove_peering_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5230,7 +5674,6 @@ def test_remove_peering_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_remove_peering_request_resource"] = {"name": "name_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5272,8 +5715,9 @@ def test_remove_peering_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5363,8 +5807,9 @@ def test_switch_to_custom_mode_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5466,8 +5911,9 @@ def test_switch_to_custom_mode_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5597,8 +6043,9 @@ def test_switch_to_custom_mode_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -5685,8 +6132,9 @@ def test_switch_to_custom_mode_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5766,8 +6214,9 @@ def test_switch_to_custom_mode_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5897,8 +6346,9 @@ def test_switch_to_custom_mode_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5968,6 +6418,86 @@ def test_update_peering_rest(request_type): "state_details": "state_details_value", } } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdatePeeringNetworkRequest.meta.fields[ + "networks_update_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_update_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["networks_update_peering_request_resource"][field] + ), + ): + del request_init["networks_update_peering_request_resource"][field][ + i + ][subfield] + else: + del request_init["networks_update_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
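The pruning loop above reduces to a fairly small idea once the protobuf plumbing is stripped away. The following self-contained sketch re-implements it with plain dicts; the field names are hypothetical and only the shape of the logic mirrors the generated test.

```python
# Pairs of (field, subfield) that the "runtime" copy of the message still knows about.
runtime_nested_fields = {("network_peering", "name"), ("network_peering", "peer_mtu")}

sample = {
    "network_peering": {
        "name": "name_value",
        "peer_mtu": 865,
        "brand_new_field": "added after codegen",  # unknown at runtime in this sketch
    }
}

# Collect subfields of the sample request that the runtime dependency no longer has.
stale = [
    (field, sub)
    for field, value in sample.items()
    if isinstance(value, dict)
    for sub in value
    if (field, sub) not in runtime_nested_fields
]

# Drop them so the request can still be constructed against the older message.
for field, sub in stale:
    del sample[field][sub]

assert "brand_new_field" not in sample["network_peering"]
```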
@@ -6001,8 +6531,9 @@ def test_update_peering_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6105,8 +6636,9 @@ def test_update_peering_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6199,22 +6731,6 @@ def test_update_peering_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_update_peering_request_resource"] = { - "network_peering": { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6256,8 +6772,9 @@ def test_update_peering_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6330,6 +6847,86 @@ def test_update_peering_unary_rest(request_type): "state_details": "state_details_value", } } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdatePeeringNetworkRequest.meta.fields[ + "networks_update_peering_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "networks_update_peering_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["networks_update_peering_request_resource"][field] + ), + ): + del request_init["networks_update_peering_request_resource"][field][ + i + ][subfield] + else: + del request_init["networks_update_peering_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
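One branch of that loop handles repeated fields, where the stale subfield has to be dropped from every element of the list rather than once. A toy illustration with hypothetical field names:

```python
sample = {
    "peerings": [
        {"name": "a", "stale": 1},
        {"name": "b", "stale": 2},
    ]
}

field, subfield, is_repeated = "peerings", "stale", True

if is_repeated:
    # Repeated field: remove the stale subfield from each list element.
    for item in sample[field]:
        item.pop(subfield, None)
else:
    # Singular message field: remove it once.
    sample[field].pop(subfield, None)

assert all("stale" not in item for item in sample["peerings"])
```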
@@ -6363,8 +6960,9 @@ def test_update_peering_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6445,8 +7043,9 @@ def test_update_peering_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6539,22 +7138,6 @@ def test_update_peering_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "network": "sample2"} - request_init["networks_update_peering_request_resource"] = { - "network_peering": { - "auto_create_routes": True, - "exchange_subnet_routes": True, - "export_custom_routes": True, - "export_subnet_routes_with_public_ip": True, - "import_custom_routes": True, - "import_subnet_routes_with_public_ip": True, - "name": "name_value", - "network": "network_value", - "peer_mtu": 865, - "stack_type": "stack_type_value", - "state": "state_value", - "state_details": "state_details_value", - } - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6596,8 +7179,9 @@ def test_update_peering_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_node_groups.py b/tests/unit/gapic/compute_v1/test_node_groups.py index 944a37ad..77a864a3 100644 --- a/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/tests/unit/gapic/compute_v1/test_node_groups.py @@ -567,6 +567,84 @@ def test_add_nodes_rest(request_type): request_init["node_groups_add_nodes_request_resource"] = { "additional_node_count": 2214 } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddNodesNodeGroupRequest.meta.fields[ + "node_groups_add_nodes_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_add_nodes_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["node_groups_add_nodes_request_resource"][field]), + ): + del request_init["node_groups_add_nodes_request_resource"][field][ + i + ][subfield] + else: + del request_init["node_groups_add_nodes_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
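The other change repeated throughout these hunks, rebinding `return_value` where `pb_return_value` used to be, still performs the same conversion: `json_format` only understands raw protobuf messages, so the proto-plus `Operation` is unwrapped with `.pb()` before serialization. A minimal sketch of that round trip, assuming `google-cloud-compute` is installed:

```python
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

return_value = compute.Operation(name="operation-123")

pb = compute.Operation.pb(return_value)       # unwrap to the raw protobuf message
json_payload = json_format.MessageToJson(pb)  # what the mocked HTTP body carries

# Parsing the JSON back into a fresh protobuf message recovers the same data.
parsed = json_format.Parse(json_payload, type(pb)())
assert parsed.name == "operation-123"
```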
@@ -600,8 +678,9 @@ def test_add_nodes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -706,8 +785,9 @@ def test_add_nodes_rest_required_fields(request_type=compute.AddNodesNodeGroupRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -803,9 +883,6 @@ def test_add_nodes_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_add_nodes_request_resource"] = { - "additional_node_count": 2214 - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -852,8 +929,9 @@ def test_add_nodes_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -914,6 +992,84 @@ def test_add_nodes_unary_rest(request_type): request_init["node_groups_add_nodes_request_resource"] = { "additional_node_count": 2214 } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddNodesNodeGroupRequest.meta.fields[ + "node_groups_add_nodes_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_add_nodes_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["node_groups_add_nodes_request_resource"][field]), + ): + del request_init["node_groups_add_nodes_request_resource"][field][ + i + ][subfield] + else: + del request_init["node_groups_add_nodes_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
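For context on the `request = request_type(**request_init)` line that closes each of these blocks: proto-plus coerces nested dicts into the corresponding message types, which is why the sample request can be written as plain dictionaries. A short sketch, assuming `google-cloud-compute` is installed:

```python
from google.cloud.compute_v1.types import compute

request_init = {
    "project": "sample1",
    "zone": "sample2",
    "node_group": "sample3",
    "node_groups_add_nodes_request_resource": {"additional_node_count": 2214},
}

# The nested dict is converted into a NodeGroupsAddNodesRequest message.
request = compute.AddNodesNodeGroupRequest(**request_init)
assert request.node_groups_add_nodes_request_resource.additional_node_count == 2214
```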
@@ -947,8 +1103,9 @@ def test_add_nodes_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1033,8 +1190,9 @@ def test_add_nodes_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1130,9 +1288,6 @@ def test_add_nodes_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_add_nodes_request_resource"] = { - "additional_node_count": 2214 - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1179,8 +1334,9 @@ def test_add_nodes_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1254,8 +1410,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1345,8 +1502,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1483,8 +1641,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupAggregatedList.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1634,8 +1793,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1739,8 +1899,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteNodeGroupRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1876,8 +2037,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1965,8 +2127,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2048,8 +2211,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteNodeGroupR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2185,8 +2349,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2244,6 +2409,86 @@ def test_delete_nodes_rest(request_type): request_init["node_groups_delete_nodes_request_resource"] = { "nodes": ["nodes_value1", "nodes_value2"] } + # The version of a generated dependency 
at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteNodesNodeGroupRequest.meta.fields[ + "node_groups_delete_nodes_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_delete_nodes_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["node_groups_delete_nodes_request_resource"][field] + ), + ): + del request_init["node_groups_delete_nodes_request_resource"][ + field + ][i][subfield] + else: + del request_init["node_groups_delete_nodes_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
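The `else` branch of `get_message_fields` (marked `# pragma: NO COVER` above) covers fields whose type is a raw protobuf message rather than a proto-plus wrapper. A small sketch with a well-known protobuf type shows what that introspection yields:

```python
from google.protobuf import timestamp_pb2

# Raw protobuf messages describe their fields via DESCRIPTOR.fields.
descriptor_fields = timestamp_pb2.Timestamp.DESCRIPTOR.fields
print([f.name for f in descriptor_fields])  # ['seconds', 'nanos']
```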
@@ -2277,8 +2522,9 @@ def test_delete_nodes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2385,8 +2631,9 @@ def test_delete_nodes_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2482,9 +2729,6 @@ def test_delete_nodes_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_delete_nodes_request_resource"] = { - "nodes": ["nodes_value1", "nodes_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2531,8 +2775,9 @@ def test_delete_nodes_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2593,6 +2838,86 @@ def test_delete_nodes_unary_rest(request_type): request_init["node_groups_delete_nodes_request_resource"] = { "nodes": ["nodes_value1", "nodes_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteNodesNodeGroupRequest.meta.fields[ + "node_groups_delete_nodes_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_delete_nodes_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["node_groups_delete_nodes_request_resource"][field] + ), + ): + del request_init["node_groups_delete_nodes_request_resource"][ + field + ][i][subfield] + else: + del request_init["node_groups_delete_nodes_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
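The mocked responses wrapped a few lines below each of these blocks are ordinary `requests.Response` objects with their private `_content` pre-filled; that is enough for the transport to read the status code and parse the JSON body. A minimal, self-contained sketch of that fake:

```python
import json
from requests import Response

response_value = Response()
response_value.status_code = 200
# Pre-populate the body so .content / .text / .json() work without a real socket.
response_value._content = json.dumps({"name": "operation-123"}).encode("UTF-8")

assert response_value.json()["name"] == "operation-123"
```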
@@ -2626,8 +2951,9 @@ def test_delete_nodes_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2712,8 +3038,9 @@ def test_delete_nodes_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2809,9 +3136,6 @@ def test_delete_nodes_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_delete_nodes_request_resource"] = { - "nodes": ["nodes_value1", "nodes_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2858,8 +3182,9 @@ def test_delete_nodes_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2941,8 +3266,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3035,8 +3361,9 @@ def test_get_rest_required_fields(request_type=compute.GetNodeGroupRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3172,8 +3499,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -3242,8 +3570,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3330,8 +3659,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3469,8 +3799,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3550,6 +3881,73 @@ def test_insert_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNodeGroupRequest.meta.fields["node_group_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["node_group_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_group_resource"][field])): + del request_init["node_group_resource"][field][i][subfield] + else: + del request_init["node_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
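As a side note on the `node_group_resource` sample used here, the nested dicts map directly onto the `NodeGroup` message, including the doubly nested maintenance duration. A short sketch, assuming `google-cloud-compute` is installed:

```python
from google.cloud.compute_v1.types import compute

node_group = compute.NodeGroup(
    name="name_value",
    maintenance_window={
        "maintenance_duration": {"seconds": 751, "nanos": 543},
        "start_time": "start_time_value",
    },
)
# The inner dicts become NodeGroupMaintenanceWindow and Duration messages.
assert node_group.maintenance_window.maintenance_duration.seconds == 751
```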
@@ -3583,8 +3981,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3697,8 +4096,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertNodeGroupRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3802,31 +4202,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["node_group_resource"] = { - "autoscaling_policy": { - "max_nodes": 958, - "min_nodes": 956, - "mode": "mode_value", - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "location_hint": "location_hint_value", - "maintenance_policy": "maintenance_policy_value", - "maintenance_window": { - "maintenance_duration": {"nanos": 543, "seconds": 751}, - "start_time": "start_time_value", - }, - "name": "name_value", - "node_template": "node_template_value", - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "size": 443, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3869,8 +4244,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3953,6 +4329,73 @@ def test_insert_unary_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNodeGroupRequest.meta.fields["node_group_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["node_group_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_group_resource"][field])): + del request_init["node_group_resource"][field][i][subfield] + else: + del request_init["node_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
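One detail worth keeping in mind when reading the JSON bodies these mocks produce: `json_format.MessageToJson` camel-cases field names by default and only keeps the proto names when asked to. A short illustration (exact whitespace of the output elided):

```python
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

pb = compute.Operation.pb(compute.Operation(operation_type="insert"))

print(json_format.MessageToJson(pb))
# ... "operationType": "insert" ...
print(json_format.MessageToJson(pb, preserving_proto_field_names=True))
# ... "operation_type": "insert" ...
```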
@@ -3986,8 +4429,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4078,8 +4522,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertNodeGroupR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4183,31 +4628,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["node_group_resource"] = { - "autoscaling_policy": { - "max_nodes": 958, - "min_nodes": 956, - "mode": "mode_value", - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "location_hint": "location_hint_value", - "maintenance_policy": "maintenance_policy_value", - "maintenance_window": { - "maintenance_duration": {"nanos": 543, "seconds": 751}, - "start_time": "start_time_value", - }, - "name": "name_value", - "node_template": "node_template_value", - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "size": 443, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
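The bad-request variants (their bodies are elided in this diff) drive the same mock with a 400 status. A rough, hypothetical sketch of how `google.api_core` turns such a response into the exception those tests expect; the URL and error message below are made up:

```python
import json
from requests import Request, Response
from google.api_core import exceptions as core_exceptions

response_value = Response()
response_value.status_code = 400
# from_http_response reads the originating request's method and URL for its message.
response_value.request = Request(method="POST", url="https://example.com/compute").prepare()
response_value._content = json.dumps(
    {"error": {"message": "Invalid value for field 'resource'"}}
).encode("UTF-8")

exc = core_exceptions.from_http_response(response_value)
assert isinstance(exc, core_exceptions.BadRequest)
```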
@@ -4250,8 +4670,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4324,8 +4745,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4415,8 +4837,9 @@ def test_list_rest_required_fields(request_type=compute.ListNodeGroupsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4556,8 +4979,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4681,8 +5105,9 @@ def test_list_nodes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupsListNodes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4778,8 +5203,9 @@ def test_list_nodes_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupsListNodes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4927,8 +5353,9 @@ def test_list_nodes_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeGroupsListNodes.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to 
protobuf type + return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5067,6 +5494,73 @@ def test_patch_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNodeGroupRequest.meta.fields["node_group_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["node_group_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_group_resource"][field])): + del request_init["node_group_resource"][field][i][subfield] + else: + del request_init["node_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
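The block added to test_patch_rest above (and repeated for every resource-bearing RPC below) protects the tests when the installed google-cloud-compute or protobuf runtime differs from the version the sample request was generated against: any subfield in the hard-coded dict that the runtime message type does not define is deleted before the request object is built. The detection relies on raw protobuf (*_pb2) classes exposing DESCRIPTOR, while proto-plus wrappers expose field metadata via .meta.fields. A condensed sketch of that lookup, using the same APIs the generated code calls:

from google.cloud.compute_v1.types import compute

def get_message_fields(field):
    # Return the nested fields of a message-typed field, or [] otherwise.
    if not (hasattr(field, "message") and field.message):
        return []
    if hasattr(field.message, "DESCRIPTOR"):
        # Raw protobuf (*_pb2) message type.
        return list(field.message.DESCRIPTOR.fields)
    # proto-plus wrapper type.
    return list(field.message.meta.fields.values())

# The (field, subfield) pairs the runtime actually knows about, here for
# PatchNodeGroupRequest.node_group_resource:
test_field = compute.PatchNodeGroupRequest.meta.fields["node_group_resource"]
runtime_nested_fields = [
    (field.name, nested.name)
    for field in get_message_fields(test_field)
    for nested in get_message_fields(field)
]

Anything in the sample dict that is not in runtime_nested_fields is treated as a field the runtime does not know and is pruned.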
@@ -5100,8 +5594,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5206,8 +5701,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchNodeGroupRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5301,31 +5797,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_group_resource"] = { - "autoscaling_policy": { - "max_nodes": 958, - "min_nodes": 956, - "mode": "mode_value", - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "location_hint": "location_hint_value", - "maintenance_policy": "maintenance_policy_value", - "maintenance_window": { - "maintenance_duration": {"nanos": 543, "seconds": 751}, - "start_time": "start_time_value", - }, - "name": "name_value", - "node_template": "node_template_value", - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "size": 443, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5372,8 +5843,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5456,6 +5928,73 @@ def test_patch_unary_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNodeGroupRequest.meta.fields["node_group_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["node_group_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_group_resource"][field])): + del request_init["node_group_resource"][field][i][subfield] + else: + del request_init["node_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5489,8 +6028,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5573,8 +6113,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchNodeGroupReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5668,31 +6209,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_group_resource"] = { - "autoscaling_policy": { - "max_nodes": 958, - "min_nodes": 956, - "mode": "mode_value", - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "location_hint": "location_hint_value", - "maintenance_policy": "maintenance_policy_value", - "maintenance_window": { - "maintenance_duration": {"nanos": 543, "seconds": 751}, - "start_time": "start_time_value", - }, - "name": "name_value", - "node_template": "node_template_value", - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "size": 443, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5739,8 +6255,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5875,6 +6392,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyNodeGroupRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
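Because the same pruning loop is inlined verbatim in each test, it can help to read it as a single helper. The following is a hypothetical refactor for illustration only (not part of this change; the generator deliberately keeps each test self-contained), assuming a runtime_nested_fields list built as in the previous sketch:

def prune_unknown_subfields(request_init, resource_key, runtime_nested_fields):
    # Drop sample-request subfields the runtime message type does not define,
    # handling both singular message fields and repeated message fields.
    resource = request_init[resource_key]
    to_delete = []
    for field, value in resource.items():
        if isinstance(value, list) and value:
            candidate, is_repeated = value[0], True
        elif isinstance(value, dict):
            candidate, is_repeated = value, False
        else:
            continue
        if not hasattr(candidate, "keys"):
            # e.g. a repeated scalar such as a list of strings: nothing nested to prune.
            continue
        for subfield in candidate:
            if (field, subfield) not in runtime_nested_fields:
                to_delete.append((field, subfield, is_repeated))
    for field, subfield, is_repeated in to_delete:
        if is_repeated:
            for item in resource[field]:  # sample entries are uniform dicts
                del item[subfield]
        else:
            del resource[field][subfield]

With that helper, the block inlined into this test would amount to prune_unknown_subfields(request_init, "zone_set_policy_request_resource", runtime_nested_fields).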
@@ -5889,8 +6481,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5976,8 +6569,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6073,83 +6667,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
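The deletion of the large zone_set_policy_request_resource dict from test_set_iam_policy_rest_bad_request above (and the matching deletions in the other *_rest_bad_request tests) reflects that those tests only need a request that satisfies URI transcoding: the mocked session returns a 400 before the body is ever inspected. A rough sketch of the shape of such a test, assuming the client construction and imports used elsewhere in this file:

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from requests import Response
from requests.sessions import Session

from google.cloud.compute_v1.services.node_groups import NodeGroupsClient
from google.cloud.compute_v1.types import compute

client = NodeGroupsClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
# Path parameters alone are enough to satisfy transcoding.
request = compute.SetIamPolicyNodeGroupRequest(
    project="sample1", zone="sample2", resource="sample3"
)

with mock.patch.object(Session, "request") as req, pytest.raises(
    core_exceptions.BadRequest
):
    response_value = Response()
    response_value.status_code = 400
    response_value.request = mock.Mock()
    response_value.json = mock.Mock(return_value={})
    req.return_value = response_value
    client.set_iam_policy(request)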
@@ -6196,8 +6713,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6258,6 +6776,88 @@ def test_set_node_template_rest(request_type): request_init["node_groups_set_node_template_request_resource"] = { "node_template": "node_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNodeTemplateNodeGroupRequest.meta.fields[ + "node_groups_set_node_template_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_set_node_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["node_groups_set_node_template_request_resource"][ + field + ] + ), + ): + del request_init["node_groups_set_node_template_request_resource"][ + field + ][i][subfield] + else: + del 
request_init["node_groups_set_node_template_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -6291,8 +6891,9 @@ def test_set_node_template_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6399,8 +7000,9 @@ def test_set_node_template_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6496,9 +7098,6 @@ def test_set_node_template_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_set_node_template_request_resource"] = { - "node_template": "node_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6545,8 +7144,9 @@ def test_set_node_template_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6607,6 +7207,88 @@ def test_set_node_template_unary_rest(request_type): request_init["node_groups_set_node_template_request_resource"] = { "node_template": "node_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNodeTemplateNodeGroupRequest.meta.fields[ + "node_groups_set_node_template_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_set_node_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["node_groups_set_node_template_request_resource"][ + field + ] + ), + ): + del request_init["node_groups_set_node_template_request_resource"][ + field + ][i][subfield] + else: + del request_init["node_groups_set_node_template_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6640,8 +7322,9 @@ def test_set_node_template_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6726,8 +7409,9 @@ def test_set_node_template_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6823,9 +7507,6 @@ def test_set_node_template_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_set_node_template_request_resource"] = { - "node_template": "node_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6872,8 +7553,9 @@ def test_set_node_template_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6934,6 +7616,88 @@ def test_simulate_maintenance_event_rest(request_type): request_init["node_groups_simulate_maintenance_event_request_resource"] = { "nodes": ["nodes_value1", "nodes_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SimulateMaintenanceEventNodeGroupRequest.meta.fields[ + "node_groups_simulate_maintenance_event_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field] + ), + ): + del request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field][i][subfield] + else: + del request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6967,8 +7731,9 @@ def test_simulate_maintenance_event_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7075,8 +7840,9 @@ def test_simulate_maintenance_event_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7173,9 +7939,6 @@ def test_simulate_maintenance_event_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_simulate_maintenance_event_request_resource"] = { - "nodes": ["nodes_value1", "nodes_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7222,8 +7985,9 @@ def test_simulate_maintenance_event_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7284,6 +8048,88 @@ def test_simulate_maintenance_event_unary_rest(request_type): request_init["node_groups_simulate_maintenance_event_request_resource"] = { "nodes": ["nodes_value1", "nodes_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SimulateMaintenanceEventNodeGroupRequest.meta.fields[ + "node_groups_simulate_maintenance_event_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field] + ), + ): + del request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field][i][subfield] + else: + del request_init[ + "node_groups_simulate_maintenance_event_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7317,8 +8163,9 @@ def test_simulate_maintenance_event_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7403,8 +8250,9 @@ def test_simulate_maintenance_event_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7501,9 +8349,6 @@ def test_simulate_maintenance_event_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} - request_init["node_groups_simulate_maintenance_event_request_resource"] = { - "nodes": ["nodes_value1", "nodes_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7550,8 +8395,9 @@ def test_simulate_maintenance_event_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7612,6 +8458,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsNodeGroupRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
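For test_permissions_request_resource the pruning loop is effectively a no-op: its only populated field is a repeated string, so the first element is a plain str, the hasattr(result, "keys") guard fails, and nothing is ever collected for deletion. A short illustration:

value = ["permissions_value1", "permissions_value2"]
result = value[0]
assert not hasattr(result, "keys")  # plain strings have no nested subfields, so nothing is pruned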
@@ -7624,8 +8545,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7709,8 +8631,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7808,9 +8731,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7857,8 +8777,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_node_templates.py b/tests/unit/gapic/compute_v1/test_node_templates.py index cf563004..dcb67f59 100644 --- a/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/tests/unit/gapic/compute_v1/test_node_templates.py @@ -589,8 +589,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -680,8 +681,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -818,8 +820,9 @@ def test_aggregated_list_rest_flattened(): # Wrap 
the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -978,8 +981,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1083,8 +1087,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteNodeTemplateRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1226,8 +1231,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1319,8 +1325,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1404,8 +1411,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1547,8 +1555,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1629,8 +1638,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1721,8 +1731,9 @@ def test_get_rest_required_fields(request_type=compute.GetNodeTemplateRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1862,8 +1873,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1932,8 +1944,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2020,8 +2033,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2159,8 +2173,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2241,6 +2256,75 @@ def test_insert_rest(request_type): "status": "status_value", "status_message": "status_message_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNodeTemplateRequest.meta.fields["node_template_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_template_resource"][field])): + del request_init["node_template_resource"][field][i][subfield] + else: + del request_init["node_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
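The InsertNodeTemplateRequest sample above is one of the cases with repeated message fields ("accelerators", "disks"), which is where the is_repeated branch of the pruning loop matters: a stale subfield has to be removed from every element, not only the first one that was inspected. A toy illustration using values from the sample request (the missing-field scenario itself is hypothetical):

request_init = {
    "node_template_resource": {
        "accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ]
    }
}
# Pretend the runtime message behind "accelerators" no longer defines
# "accelerator_type": the loop walks every element of the repeated field.
for i in range(len(request_init["node_template_resource"]["accelerators"])):
    del request_init["node_template_resource"]["accelerators"][i]["accelerator_type"]
assert request_init["node_template_resource"]["accelerators"] == [
    {"accelerator_count": 1805}
]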
@@ -2274,8 +2358,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2376,8 +2461,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertNodeTemplateRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2472,32 +2558,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["node_template_resource"] = { - "accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - "cpu_overcommit_type": "cpu_overcommit_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disks": [ - {"disk_count": 1075, "disk_size_gb": 1261, "disk_type": "disk_type_value"} - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "node_affinity_labels": {}, - "node_type": "node_type_value", - "node_type_flexibility": { - "cpus": "cpus_value", - "local_ssd": "local_ssd_value", - "memory": "memory_value", - }, - "region": "region_value", - "self_link": "self_link_value", - "server_binding": {"type_": "type__value"}, - "status": "status_value", - "status_message": "status_message_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2539,8 +2599,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2623,6 +2684,75 @@ def test_insert_unary_rest(request_type): "status": "status_value", "status_message": "status_message_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertNodeTemplateRequest.meta.fields["node_template_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["node_template_resource"][field])): + del request_init["node_template_resource"][field][i][subfield] + else: + del request_init["node_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2656,8 +2786,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2738,8 +2869,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2834,32 +2966,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["node_template_resource"] = { - "accelerators": [ - {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"} - ], - "cpu_overcommit_type": "cpu_overcommit_type_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disks": [ - {"disk_count": 1075, "disk_size_gb": 1261, "disk_type": "disk_type_value"} - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "node_affinity_labels": {}, - "node_type": "node_type_value", - "node_type_flexibility": { - "cpus": "cpus_value", - "local_ssd": "local_ssd_value", - "memory": "memory_value", - }, - "region": "region_value", - "self_link": "self_link_value", - "server_binding": {"type_": "type__value"}, - "status": "status_value", - "status_message": "status_message_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
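The recurring `pb_return_value` → `return_value` change in these hunks reflects that `json_format.MessageToJson` accepts a raw protobuf message, not the proto-plus wrapper, so the wrapper is unwrapped with the class-level `pb()` helper first. A small sketch of that conversion, assuming `google-cloud-compute` is installed and importable as `google.cloud.compute_v1`:

from google.cloud import compute_v1 as compute
from google.protobuf import json_format

# Build the proto-plus wrapper the client normally returns ...
return_value = compute.Operation(name="operation-123", status=compute.Operation.Status.DONE)

# ... unwrap it to the underlying protobuf message ...
pb_message = compute.Operation.pb(return_value)

# ... and only then serialize it, since MessageToJson expects a protobuf message.
json_return_value = json_format.MessageToJson(pb_message)
print(json_return_value)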
@@ -2901,8 +3007,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2974,8 +3081,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3065,8 +3173,9 @@ def test_list_rest_required_fields(request_type=compute.ListNodeTemplatesRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3208,8 +3317,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3395,6 +3505,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyNodeTemplateRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3409,8 +3594,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3496,8 +3682,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3593,83 +3780,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
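The deletions in the `*_rest_bad_request` tests are deliberate: those tests only assert that an HTTP 400 surfaces as `core_exceptions.BadRequest`, so the request body contributes nothing and would otherwise need the same runtime pruning. A stripped-down sketch of such a test, mirroring the structure of the surrounding file (the exact wiring is illustrative, not the generated code itself):

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.cloud import compute_v1 as compute
from requests import Response
from requests.sessions import Session


def test_set_iam_policy_rest_bad_request_minimal():
    client = compute.NodeTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # Only the fields needed for URL transcoding are populated; the request
    # body is irrelevant to the error path being exercised.
    request = compute.SetIamPolicyNodeTemplateRequest(
        project="sample1", region="sample2", resource="sample3"
    )

    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.set_iam_policy(request)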
@@ -3716,8 +3826,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3778,6 +3889,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsNodeTemplateRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # 
Mock the http request call within the method and fake a response. @@ -3790,8 +3976,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3875,8 +4062,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3974,9 +4162,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4023,8 +4208,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_node_types.py b/tests/unit/gapic/compute_v1/test_node_types.py index d86b5229..e549155a 100644 --- a/tests/unit/gapic/compute_v1/test_node_types.py +++ b/tests/unit/gapic/compute_v1/test_node_types.py @@ -571,8 +571,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -662,8 +663,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -798,8 +800,9 @@ def 
test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -938,8 +941,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1030,8 +1034,9 @@ def test_get_rest_required_fields(request_type=compute.GetNodeTypeRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1165,8 +1170,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1236,8 +1242,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1327,8 +1334,9 @@ def test_list_rest_required_fields(request_type=compute.ListNodeTypesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NodeTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1464,8 +1472,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NodeTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.NodeTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index d37f9942..084fb68a 100644 --- a/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,8 +840,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -998,8 +1001,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1103,8 +1107,9 @@ def test_delete_rest_required_fields(request_type=compute.DeletePacketMirroringR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1246,8 +1251,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type 
+ return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1339,8 +1345,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1424,8 +1431,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1567,8 +1575,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1647,8 +1656,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroring.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1737,8 +1747,9 @@ def test_get_rest_required_fields(request_type=compute.GetPacketMirroringRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroring.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1882,8 +1893,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroring.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1963,6 +1975,79 @@ def test_insert_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a 
generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPacketMirroringRequest.meta.fields[ + "packet_mirroring_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "packet_mirroring_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["packet_mirroring_resource"][field]) + ): + del request_init["packet_mirroring_resource"][field][i][subfield] + else: + del request_init["packet_mirroring_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
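`get_message_fields` relies on the fact that proto-plus message classes describe their schema through `meta.fields`, while plain protobuf classes expose it through `DESCRIPTOR.fields`. A small sketch of that distinction, assuming the `proto-plus` and `protobuf` packages are available; the `Book` message is made up for illustration:

import proto
from google.protobuf import timestamp_pb2


class Book(proto.Message):
    """A proto-plus message declared inline purely for illustration."""

    title = proto.Field(proto.STRING, number=1)
    page_count = proto.Field(proto.INT32, number=2)


def get_message_fields(message_cls):
    # Mirrors the check in the generated tests: proto-plus wrappers keep their
    # schema under `meta.fields`, plain protobuf classes under `DESCRIPTOR.fields`.
    if not hasattr(message_cls, "DESCRIPTOR"):
        return list(message_cls.meta.fields.values())
    return list(message_cls.DESCRIPTOR.fields)


print([f.name for f in get_message_fields(Book)])                     # proto-plus branch
print([f.name for f in get_message_fields(timestamp_pb2.Timestamp)])  # protobuf branch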
@@ -1996,8 +2081,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2098,8 +2184,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertPacketMirroringR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2194,31 +2281,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["packet_mirroring_resource"] = { - "collector_ilb": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable": "enable_value", - "filter": { - "I_p_protocols": ["I_p_protocols_value1", "I_p_protocols_value2"], - "cidr_ranges": ["cidr_ranges_value1", "cidr_ranges_value2"], - "direction": "direction_value", - }, - "id": 205, - "kind": "kind_value", - "mirrored_resources": { - "instances": [{"canonical_url": "canonical_url_value", "url": "url_value"}], - "subnetworks": [ - {"canonical_url": "canonical_url_value", "url": "url_value"} - ], - "tags": ["tags_value1", "tags_value2"], - }, - "name": "name_value", - "network": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "priority": 898, - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2262,8 +2324,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2347,6 +2410,79 @@ def test_insert_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPacketMirroringRequest.meta.fields[ + "packet_mirroring_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "packet_mirroring_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["packet_mirroring_resource"][field]) + ): + del request_init["packet_mirroring_resource"][field][i][subfield] + else: + del request_init["packet_mirroring_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2380,8 +2516,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2462,8 +2599,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2558,31 +2696,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["packet_mirroring_resource"] = { - "collector_ilb": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable": "enable_value", - "filter": { - "I_p_protocols": ["I_p_protocols_value1", "I_p_protocols_value2"], - "cidr_ranges": ["cidr_ranges_value1", "cidr_ranges_value2"], - "direction": "direction_value", - }, - "id": 205, - "kind": "kind_value", - "mirrored_resources": { - "instances": [{"canonical_url": "canonical_url_value", "url": "url_value"}], - "subnetworks": [ - {"canonical_url": "canonical_url_value", "url": "url_value"} - ], - "tags": ["tags_value1", "tags_value2"], - }, - "name": "name_value", - "network": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "priority": 898, - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
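The success-path counterparts of these hunks all share one mocking pattern: the expected message is serialized to JSON, stuffed into a `requests.Response`, and returned from a patched `Session.request`, so the REST client under test never touches the network. A condensed, self-contained sketch of that pattern (the method and field values are chosen for illustration):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1 as compute
from google.protobuf import json_format
from requests import Response
from requests.sessions import Session


def test_get_rest_mocked_response():
    client = compute.PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    expected = compute.PacketMirroring(name="name_value", priority=898)

    with mock.patch.object(Session, "request") as req:
        # Wrap the expected value into a Response object, as the generated tests do.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = json_format.MessageToJson(
            compute.PacketMirroring.pb(expected)
        ).encode("UTF-8")
        req.return_value = response_value

        result = client.get(
            project="sample1", region="sample2", packet_mirroring="sample3"
        )

    assert result.name == "name_value"
    assert result.priority == 898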
@@ -2626,8 +2739,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2701,8 +2815,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2792,8 +2907,9 @@ def test_list_rest_required_fields(request_type=compute.ListPacketMirroringsRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2935,8 +3051,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PacketMirroringList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3074,6 +3191,79 @@ def test_patch_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPacketMirroringRequest.meta.fields[ + "packet_mirroring_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "packet_mirroring_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["packet_mirroring_resource"][field]) + ): + del request_init["packet_mirroring_resource"][field][i][subfield] + else: + del request_init["packet_mirroring_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3107,8 +3297,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3213,8 +3404,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchPacketMirroringReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3314,31 +3506,6 @@ def test_patch_rest_bad_request( "region": "sample2", "packet_mirroring": "sample3", } - request_init["packet_mirroring_resource"] = { - "collector_ilb": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable": "enable_value", - "filter": { - "I_p_protocols": ["I_p_protocols_value1", "I_p_protocols_value2"], - "cidr_ranges": ["cidr_ranges_value1", "cidr_ranges_value2"], - "direction": "direction_value", - }, - "id": 205, - "kind": "kind_value", - "mirrored_resources": { - "instances": [{"canonical_url": "canonical_url_value", "url": "url_value"}], - "subnetworks": [ - {"canonical_url": "canonical_url_value", "url": "url_value"} - ], - "tags": ["tags_value1", "tags_value2"], - }, - "name": "name_value", - "network": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "priority": 898, - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3387,8 +3554,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3477,6 +3645,79 @@ def test_patch_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPacketMirroringRequest.meta.fields[ + "packet_mirroring_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "packet_mirroring_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["packet_mirroring_resource"][field]) + ): + del request_init["packet_mirroring_resource"][field][i][subfield] + else: + del request_init["packet_mirroring_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
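The JSON written into `_content` is what the REST transport later parses back into the expected return type, which is why it must be produced from (and parsed into) the protobuf form of the message. A round-trip sketch under the same assumptions as above:

from google.cloud import compute_v1 as compute
from google.protobuf import json_format

original = compute.Operation(name="operation-123")

# Serialize via the underlying protobuf message ...
wire_json = json_format.MessageToJson(compute.Operation.pb(original))

# ... and parse it back the same way the REST transport does: Parse() mutates
# the protobuf message backing the proto-plus wrapper in place.
parsed = compute.Operation()
json_format.Parse(wire_json, compute.Operation.pb(parsed), ignore_unknown_fields=True)

assert parsed.name == "operation-123"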
@@ -3510,8 +3751,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3596,8 +3838,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3697,31 +3940,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "packet_mirroring": "sample3", } - request_init["packet_mirroring_resource"] = { - "collector_ilb": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable": "enable_value", - "filter": { - "I_p_protocols": ["I_p_protocols_value1", "I_p_protocols_value2"], - "cidr_ranges": ["cidr_ranges_value1", "cidr_ranges_value2"], - "direction": "direction_value", - }, - "id": 205, - "kind": "kind_value", - "mirrored_resources": { - "instances": [{"canonical_url": "canonical_url_value", "url": "url_value"}], - "subnetworks": [ - {"canonical_url": "canonical_url_value", "url": "url_value"} - ], - "tags": ["tags_value1", "tags_value2"], - }, - "name": "name_value", - "network": {"canonical_url": "canonical_url_value", "url": "url_value"}, - "priority": 898, - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3770,8 +3988,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3834,6 +4053,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsPacketMirroringRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3846,8 +4140,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3931,8 +4226,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4031,9 +4327,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4080,8 +4373,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_projects.py b/tests/unit/gapic/compute_v1/test_projects.py index 6dd4d6e4..8561c54f 100644 --- a/tests/unit/gapic/compute_v1/test_projects.py +++ b/tests/unit/gapic/compute_v1/test_projects.py @@ -589,8 +589,9 @@ def test_disable_xpn_host_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -688,8 +689,9 @@ def test_disable_xpn_host_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -810,8 +812,9 @@ def test_disable_xpn_host_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -896,8 +899,9 @@ def test_disable_xpn_host_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -973,8 +977,9 @@ def test_disable_xpn_host_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1095,8 +1100,9 @@ def test_disable_xpn_host_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1151,6 +1157,88 @@ def test_disable_xpn_resource_rest(request_type): request_init["projects_disable_xpn_resource_request_resource"] = { "xpn_resource": {"id": "id_value", "type_": "type__value"} } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DisableXpnResourceProjectRequest.meta.fields[ + "projects_disable_xpn_resource_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_disable_xpn_resource_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["projects_disable_xpn_resource_request_resource"][ + field + ] + ), + ): + del request_init["projects_disable_xpn_resource_request_resource"][ + field + ][i][subfield] + else: + del request_init["projects_disable_xpn_resource_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
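Every mock-response hunk in this change makes the same mechanical swap: the intermediate pb_return_value name goes away, and the proto-plus return value is unwrapped into its underlying protobuf message via the class's pb() helper before json_format.MessageToJson serializes it. A minimal sketch of that unwrapping, using a hypothetical FakeOperation message in place of compute.Operation:

import proto
from google.protobuf import json_format

class FakeOperation(proto.Message):
    # Hypothetical stand-in for compute.Operation.
    name = proto.Field(proto.STRING, number=1)

return_value = FakeOperation(name="op-1")

# json_format.MessageToJson expects a raw protobuf message, so the proto-plus
# wrapper is unwrapped first with the classmethod Message.pb().
json_return_value = json_format.MessageToJson(FakeOperation.pb(return_value))
print(json_return_value)  # prints the JSON form of the message, e.g. {"name": "op-1"}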
@@ -1184,8 +1272,9 @@ def test_disable_xpn_resource_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1284,8 +1373,9 @@ def test_disable_xpn_resource_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1377,9 +1467,6 @@ def test_disable_xpn_resource_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_disable_xpn_resource_request_resource"] = { - "xpn_resource": {"id": "id_value", "type_": "type__value"} - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1420,8 +1507,9 @@ def test_disable_xpn_resource_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1480,6 +1568,88 @@ def test_disable_xpn_resource_unary_rest(request_type): request_init["projects_disable_xpn_resource_request_resource"] = { "xpn_resource": {"id": "id_value", "type_": "type__value"} } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DisableXpnResourceProjectRequest.meta.fields[ + "projects_disable_xpn_resource_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_disable_xpn_resource_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["projects_disable_xpn_resource_request_resource"][ + field + ] + ), + ): + del request_init["projects_disable_xpn_resource_request_resource"][ + field + ][i][subfield] + else: + del request_init["projects_disable_xpn_resource_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1513,8 +1683,9 @@ def test_disable_xpn_resource_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1591,8 +1762,9 @@ def test_disable_xpn_resource_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1684,9 +1856,6 @@ def test_disable_xpn_resource_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_disable_xpn_resource_request_resource"] = { - "xpn_resource": {"id": "id_value", "type_": "type__value"} - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1727,8 +1896,9 @@ def test_disable_xpn_resource_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1817,8 +1987,9 @@ def test_enable_xpn_host_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1916,8 +2087,9 @@ def test_enable_xpn_host_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2038,8 +2210,9 @@ def test_enable_xpn_host_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -2124,8 +2297,9 @@ def test_enable_xpn_host_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2201,8 +2375,9 @@ def test_enable_xpn_host_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,8 +2498,9 @@ def test_enable_xpn_host_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2379,6 +2555,88 @@ def test_enable_xpn_resource_rest(request_type): request_init["projects_enable_xpn_resource_request_resource"] = { "xpn_resource": {"id": "id_value", "type_": "type__value"} } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.EnableXpnResourceProjectRequest.meta.fields[ + "projects_enable_xpn_resource_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_enable_xpn_resource_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["projects_enable_xpn_resource_request_resource"][ + field + ] + ), + ): + del request_init["projects_enable_xpn_resource_request_resource"][ + field + ][i][subfield] + else: + del request_init["projects_enable_xpn_resource_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2412,8 +2670,9 @@ def test_enable_xpn_resource_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2512,8 +2771,9 @@ def test_enable_xpn_resource_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2605,9 +2865,6 @@ def test_enable_xpn_resource_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_enable_xpn_resource_request_resource"] = { - "xpn_resource": {"id": "id_value", "type_": "type__value"} - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2648,8 +2905,9 @@ def test_enable_xpn_resource_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2708,6 +2966,88 @@ def test_enable_xpn_resource_unary_rest(request_type): request_init["projects_enable_xpn_resource_request_resource"] = { "xpn_resource": {"id": "id_value", "type_": "type__value"} } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.EnableXpnResourceProjectRequest.meta.fields[ + "projects_enable_xpn_resource_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_enable_xpn_resource_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["projects_enable_xpn_resource_request_resource"][ + field + ] + ), + ): + del request_init["projects_enable_xpn_resource_request_resource"][ + field + ][i][subfield] + else: + del request_init["projects_enable_xpn_resource_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2741,8 +3081,9 @@ def test_enable_xpn_resource_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2819,8 +3160,9 @@ def test_enable_xpn_resource_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2912,9 +3254,6 @@ def test_enable_xpn_resource_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_enable_xpn_resource_request_resource"] = { - "xpn_resource": {"id": "id_value", "type_": "type__value"} - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2955,8 +3294,9 @@ def test_enable_xpn_resource_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3034,8 +3374,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3118,8 +3459,9 @@ def test_get_rest_required_fields(request_type=compute.GetProjectRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3238,8 +3580,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -3312,8 +3655,9 @@ def test_get_xpn_host_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3398,8 +3742,9 @@ def test_get_xpn_host_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3520,8 +3865,9 @@ def test_get_xpn_host_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Project.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3586,8 +3932,9 @@ def test_get_xpn_resources_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3673,8 +4020,9 @@ def test_get_xpn_resources_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3808,8 +4156,9 @@ def test_get_xpn_resources_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3919,6 +4268,86 @@ def test_list_xpn_hosts_rest(request_type): request_init["projects_list_xpn_hosts_request_resource"] = { "organization": "organization_value" } + # The version of a generated dependency at test runtime may differ from the version 
used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ListXpnHostsProjectsRequest.meta.fields[ + "projects_list_xpn_hosts_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_list_xpn_hosts_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["projects_list_xpn_hosts_request_resource"][field] + ), + ): + del request_init["projects_list_xpn_hosts_request_resource"][field][ + i + ][subfield] + else: + del request_init["projects_list_xpn_hosts_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3934,8 +4363,9 @@ def test_list_xpn_hosts_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.XpnHostList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4024,8 +4454,9 @@ def test_list_xpn_hosts_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.XpnHostList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4125,9 +4556,6 @@ def test_list_xpn_hosts_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_list_xpn_hosts_request_resource"] = { - "organization": "organization_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4168,8 +4596,9 @@ def test_list_xpn_hosts_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.XpnHostList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4286,6 +4715,79 @@ def test_move_disk_rest(request_type): "destination_zone": "destination_zone_value", "target_disk": "target_disk_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveDiskProjectRequest.meta.fields[ + "disk_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disk_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disk_move_request_resource"][field]) + ): + del request_init["disk_move_request_resource"][field][i][subfield] + else: + del request_init["disk_move_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4319,8 +4821,9 @@ def test_move_disk_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4417,8 +4920,9 @@ def test_move_disk_rest_required_fields(request_type=compute.MoveDiskProjectRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4508,10 +5012,6 @@ def test_move_disk_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["disk_move_request_resource"] = { - "destination_zone": "destination_zone_value", - "target_disk": "target_disk_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4552,8 +5052,9 @@ def test_move_disk_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4612,6 +5113,79 @@ def test_move_disk_unary_rest(request_type): "destination_zone": "destination_zone_value", "target_disk": "target_disk_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveDiskProjectRequest.meta.fields[ + "disk_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disk_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disk_move_request_resource"][field]) + ): + del request_init["disk_move_request_resource"][field][i][subfield] + else: + del request_init["disk_move_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4645,8 +5219,9 @@ def test_move_disk_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4723,8 +5298,9 @@ def test_move_disk_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4814,10 +5390,6 @@ def test_move_disk_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["disk_move_request_resource"] = { - "destination_zone": "destination_zone_value", - "target_disk": "target_disk_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4858,8 +5430,9 @@ def test_move_disk_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4918,6 +5491,81 @@ def test_move_instance_rest(request_type): "destination_zone": "destination_zone_value", "target_instance": "target_instance_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveInstanceProjectRequest.meta.fields[ + "instance_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_move_request_resource"][field]) + ): + del request_init["instance_move_request_resource"][field][i][ + subfield + ] + else: + del request_init["instance_move_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4951,8 +5599,9 @@ def test_move_instance_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5051,8 +5700,9 @@ def test_move_instance_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5144,10 +5794,6 @@ def test_move_instance_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["instance_move_request_resource"] = { - "destination_zone": "destination_zone_value", - "target_instance": "target_instance_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5188,8 +5834,9 @@ def test_move_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5248,6 +5895,81 @@ def test_move_instance_unary_rest(request_type): "destination_zone": "destination_zone_value", "target_instance": "target_instance_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.MoveInstanceProjectRequest.meta.fields[ + "instance_move_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_move_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_move_request_resource"][field]) + ): + del request_init["instance_move_request_resource"][field][i][ + subfield + ] + else: + del request_init["instance_move_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5281,8 +6003,9 @@ def test_move_instance_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5359,8 +6082,9 @@ def test_move_instance_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5452,10 +6176,6 @@ def test_move_instance_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["instance_move_request_resource"] = { - "destination_zone": "destination_zone_value", - "target_instance": "target_instance_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5496,8 +6216,9 @@ def test_move_instance_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5557,6 +6278,75 @@ def test_set_common_instance_metadata_rest(request_type): "items": [{"key": "key_value", "value": "value_value"}], "kind": "kind_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCommonInstanceMetadataProjectRequest.meta.fields[ + "metadata_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_resource"][field])): + del request_init["metadata_resource"][field][i][subfield] + else: + del request_init["metadata_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
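The many `return_value = compute.Operation.pb(return_value)` hunks all exercise one serialization path; here is a hedged, minimal recreation of that mock-response setup (the field value is a placeholder, not taken from the real API):

# Minimal recreation of the response-mocking pattern the hunks converge on:
# proto-plus message -> raw protobuf via .pb() -> JSON -> fake requests.Response.
# The diff simply reuses the `return_value` name for the protobuf form instead
# of introducing a separate `pb_return_value`.
from google.protobuf import json_format
from requests import Response

from google.cloud.compute_v1.types import compute

return_value = compute.Operation(name="operation-123")  # placeholder value

response_value = Response()
response_value.status_code = 200
# Convert return value to protobuf type before JSON serialization
return_value = compute.Operation.pb(return_value)
response_value._content = json_format.MessageToJson(return_value).encode("UTF-8")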
@@ -5590,8 +6380,9 @@ def test_set_common_instance_metadata_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5690,8 +6481,9 @@ def test_set_common_instance_metadata_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5784,11 +6576,6 @@ def test_set_common_instance_metadata_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["metadata_resource"] = { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5827,8 +6614,9 @@ def test_set_common_instance_metadata_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5887,6 +6675,75 @@ def test_set_common_instance_metadata_unary_rest(request_type): "items": [{"key": "key_value", "value": "value_value"}], "kind": "kind_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCommonInstanceMetadataProjectRequest.meta.fields[ + "metadata_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_resource"][field])): + del request_init["metadata_resource"][field][i][subfield] + else: + del request_init["metadata_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5920,8 +6777,9 @@ def test_set_common_instance_metadata_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5998,8 +6856,9 @@ def test_set_common_instance_metadata_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6092,11 +6951,6 @@ def test_set_common_instance_metadata_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["metadata_resource"] = { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6135,8 +6989,9 @@ def test_set_common_instance_metadata_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6195,6 +7050,88 @@ def test_set_default_network_tier_rest(request_type): request_init["projects_set_default_network_tier_request_resource"] = { "network_tier": "network_tier_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetDefaultNetworkTierProjectRequest.meta.fields[ + "projects_set_default_network_tier_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_set_default_network_tier_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "projects_set_default_network_tier_request_resource" + ][field] + ), + ): + del request_init[ + "projects_set_default_network_tier_request_resource" + ][field][i][subfield] + else: + del request_init["projects_set_default_network_tier_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6228,8 +7165,9 @@ def test_set_default_network_tier_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6328,8 +7266,9 @@ def test_set_default_network_tier_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6421,9 +7360,6 @@ def test_set_default_network_tier_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_set_default_network_tier_request_resource"] = { - "network_tier": "network_tier_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6464,8 +7400,9 @@ def test_set_default_network_tier_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6524,6 +7461,88 @@ def test_set_default_network_tier_unary_rest(request_type): request_init["projects_set_default_network_tier_request_resource"] = { "network_tier": "network_tier_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetDefaultNetworkTierProjectRequest.meta.fields[ + "projects_set_default_network_tier_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "projects_set_default_network_tier_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "projects_set_default_network_tier_request_resource" + ][field] + ), + ): + del request_init[ + "projects_set_default_network_tier_request_resource" + ][field][i][subfield] + else: + del request_init["projects_set_default_network_tier_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6557,8 +7576,9 @@ def test_set_default_network_tier_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6635,8 +7655,9 @@ def test_set_default_network_tier_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6728,9 +7749,6 @@ def test_set_default_network_tier_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["projects_set_default_network_tier_request_resource"] = { - "network_tier": "network_tier_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6771,8 +7789,9 @@ def test_set_default_network_tier_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6832,6 +7851,81 @@ def test_set_usage_export_bucket_rest(request_type): "bucket_name": "bucket_name_value", "report_name_prefix": "report_name_prefix_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUsageExportBucketProjectRequest.meta.fields[ + "usage_export_location_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "usage_export_location_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["usage_export_location_resource"][field]) + ): + del request_init["usage_export_location_resource"][field][i][ + subfield + ] + else: + del request_init["usage_export_location_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
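For `SetUsageExportBucketProjectRequest` the guard is effectively a no-op: `usage_export_location_resource` carries only scalar string fields, so no (field, subfield) pairs are collected and the sample dict passes through unchanged. A quick, hedged probe of the installed types (attribute access mirrors the helper in the hunk):

# UsageExportLocation has no nested message fields, so there are no subfields
# for the version-skew guard to reconcile in this test.
from google.cloud.compute_v1.types import compute

location_fields = compute.SetUsageExportBucketProjectRequest.meta.fields[
    "usage_export_location_resource"
].message.meta.fields
assert {"bucket_name", "report_name_prefix"} <= set(location_fields)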
@@ -6865,8 +7959,9 @@ def test_set_usage_export_bucket_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6965,8 +8060,9 @@ def test_set_usage_export_bucket_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7058,10 +8154,6 @@ def test_set_usage_export_bucket_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["usage_export_location_resource"] = { - "bucket_name": "bucket_name_value", - "report_name_prefix": "report_name_prefix_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7102,8 +8194,9 @@ def test_set_usage_export_bucket_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7163,6 +8256,81 @@ def test_set_usage_export_bucket_unary_rest(request_type): "bucket_name": "bucket_name_value", "report_name_prefix": "report_name_prefix_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUsageExportBucketProjectRequest.meta.fields[ + "usage_export_location_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "usage_export_location_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["usage_export_location_resource"][field]) + ): + del request_init["usage_export_location_resource"][field][i][ + subfield + ] + else: + del request_init["usage_export_location_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7196,8 +8364,9 @@ def test_set_usage_export_bucket_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7274,8 +8443,9 @@ def test_set_usage_export_bucket_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7367,10 +8537,6 @@ def test_set_usage_export_bucket_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["usage_export_location_resource"] = { - "bucket_name": "bucket_name_value", - "report_name_prefix": "report_name_prefix_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7411,8 +8577,9 @@ def test_set_usage_export_bucket_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index 8b3cf677..4b008cf7 100644 --- a/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -640,8 +640,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -745,8 +746,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -878,8 +880,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -966,8 +969,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1049,8 +1053,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1182,8 +1187,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1259,8 +1265,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1351,8 +1358,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1486,8 +1494,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -1563,6 +1572,81 @@ def test_insert_rest(request_type): "shared_secret": "shared_secret_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
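By contrast, the `public_advertised_prefix_resource` sample above does contain a repeated message field (`public_delegated_prefixs`), which is exactly the case the `is_repeated` branch and the `[field][i][subfield]` deletion handle. A small hedged probe of the installed types, using the same attribute access as the helper in the hunk:

# public_delegated_prefixs is a repeated message field, so stale subfields have
# to be removed from every element of the sample list, one index at a time.
from google.cloud.compute_v1.types import compute

prefix_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[
    "public_advertised_prefix_resource"
]
delegated = prefix_field.message.meta.fields["public_delegated_prefixs"]
assert "ip_range" in delegated.message.meta.fields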
@@ -1596,8 +1680,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1696,8 +1781,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1791,28 +1877,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1853,8 +1917,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1932,6 +1997,81 @@ def test_insert_unary_rest(request_type): "shared_secret": "shared_secret_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1965,8 +2105,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2043,8 +2184,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2138,28 +2280,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
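A note on the `request_init[...]` blocks deleted from the `*_rest_bad_request` hunks: the HTTP layer is patched to return a canned 400, so the body payload never influences the outcome and only the routing fields needed to satisfy transcoding remain. A rough sketch of that shape, assuming the standard generated client fixture and the usual Session patch (the test bodies themselves are not shown in this diff):

# Rough sketch (assumed names): the 400 comes from the mocked transport, so
# request_init only needs routing fields such as {"project": "sample1"}.
from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import PreparedRequest, Response
from requests.sessions import Session


def assert_rejected_with_bad_request(client, request):
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = PreparedRequest()
        req.return_value = response_value
        client.insert_unary(request=request)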
@@ -2200,8 +2320,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2272,8 +2393,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2361,8 +2483,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2498,8 +2621,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2631,6 +2755,81 @@ def test_patch_rest(request_type): "shared_secret": "shared_secret_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2664,8 +2863,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2770,8 +2970,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2866,28 +3067,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2929,8 +3108,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3009,6 +3189,81 @@ def test_patch_unary_rest(request_type): "shared_secret": "shared_secret_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3042,8 +3297,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3126,8 +3382,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3222,28 +3479,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
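The rename from pb_return_value to return_value in the hunks above reflects one recurring pattern: unwrap the proto-plus message into its underlying protobuf with MessageClass.pb(...) and serialize that with json_format.MessageToJson before faking the HTTP body. A minimal sketch, assuming only the protobuf runtime is installed and using a well-known Struct in place of compute.Operation:

    # Illustrative only: serialize a protobuf message to JSON bytes the way the
    # generated tests build the mocked response body.
    from google.protobuf import json_format, struct_pb2

    pb_msg = struct_pb2.Struct()
    pb_msg.update({"status": "DONE"})

    json_return_value = json_format.MessageToJson(pb_msg)
    content = json_return_value.encode("UTF-8")  # what the tests assign to response_value._content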
@@ -3285,8 +3520,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index d7c55686..510a69d0 100644 --- a/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -619,8 +619,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -710,10 +711,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -851,8 +851,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1011,8 +1012,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1118,8 +1120,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -1261,8 +1264,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1354,8 +1358,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1439,8 +1444,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1582,8 +1588,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1665,8 +1672,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1760,8 +1768,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1905,8 +1914,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value 
to protobuf type + return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1986,6 +1996,81 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2019,8 +2104,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2123,8 +2209,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2219,31 +2306,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2285,8 +2347,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2368,6 +2431,81 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
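The hasattr(field.message, "DESCRIPTOR") check above is how the generated tests tell a raw protobuf (*_pb2) class apart from a proto-plus wrapper, which exposes its fields through .meta instead. A small sketch of the same check, assuming only the protobuf runtime; the proto-plus branch is shown but not exercised here:

    # Illustrative only: list field names for either kind of message class.
    from google.protobuf import struct_pb2

    def list_field_names(message_cls):
        if hasattr(message_cls, "DESCRIPTOR"):  # raw protobuf (*_pb2) class
            return [f.name for f in message_cls.DESCRIPTOR.fields]
        return list(message_cls.meta.fields.keys())  # proto-plus class (assumed)

    print(list_field_names(struct_pb2.Struct))  # ['fields']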
@@ -2401,8 +2539,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2483,8 +2622,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2579,31 +2719,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2645,8 +2760,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2718,8 +2834,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2811,8 +2928,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2954,8 +3072,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3093,6 +3212,81 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3126,8 +3320,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3234,8 +3429,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3335,31 +3531,6 @@ def test_patch_rest_bad_request( "region": "sample2", "public_delegated_prefix": "sample3", } - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3406,8 +3577,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3494,6 +3666,81 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3527,8 +3774,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3613,8 +3861,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3714,31 +3963,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "public_delegated_prefix": "sample3", } - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3785,8 +4009,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/tests/unit/gapic/compute_v1/test_region_autoscalers.py index 9e7c5eee..151d6bd7 100644 --- a/tests/unit/gapic/compute_v1/test_region_autoscalers.py +++ b/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -626,8 +626,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -733,8 +734,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -872,8 +874,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -961,8 +964,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1046,8 +1050,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1185,8 +1190,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1263,8 +1269,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1355,8 +1362,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionAutoscalerReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1494,8 +1502,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Autoscaler.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1594,6 +1603,75 @@ def test_insert_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionAutoscalerRequest.meta.fields[ + "autoscaler_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
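The mocked HTTP round trip that each of these tests relies on ("Wrap the value into a proper Response obj", then "req.return_value = response_value") can be reproduced in isolation. A sketch under the assumption that the transport ultimately calls requests.Session.request, which is what the generated tests patch; the URL below is a placeholder, not a real endpoint:

    # Illustrative only: fake a 200 response by setting status_code and _content
    # on a real requests.Response, then patch Session.request to return it.
    from unittest import mock
    from requests import Response, Session

    response_value = Response()
    response_value.status_code = 200
    response_value._content = b'{"status": "DONE"}'

    with mock.patch.object(Session, "request", return_value=response_value) as req:
        resp = Session().request("POST", "https://example.com/compute/v1/placeholder")
        assert resp.json()["status"] == "DONE"
        req.assert_called_once()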
@@ -1627,8 +1705,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1731,8 +1810,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1827,50 +1907,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1912,8 +1948,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2014,6 +2051,75 @@ def test_insert_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionAutoscalerRequest.meta.fields[ + "autoscaler_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2047,8 +2153,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2129,8 +2236,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2225,50 +2333,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2310,8 +2374,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2383,8 +2448,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionAutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2474,8 +2540,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionAutoscalersReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionAutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2617,8 +2684,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionAutoscalerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2771,6 +2839,73 @@ def test_patch_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2804,8 +2939,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2911,8 +3047,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchRegionAutoscalerRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3012,50 +3149,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3097,8 +3190,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3199,6 +3293,73 @@ def test_patch_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionAutoscalerRequest.meta.fields["autoscaler_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
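For readers new to the pattern these hunks repeat, the proto-plus vs. protobuf branch can be exercised on its own. A sketch against the same request type, assuming google-cloud-compute is installed; the printed names are indicative, not exhaustive:

from google.cloud import compute_v1 as compute

field = compute.PatchRegionAutoscalerRequest.meta.fields["autoscaler_resource"]

if not hasattr(field.message, "DESCRIPTOR"):
    # proto-plus wrapper type: nested fields live on the proto-plus metadata
    nested = [f.name for f in field.message.meta.fields.values()]
else:
    # raw *_pb2 type: nested fields live on the protobuf descriptor
    nested = [f.name for f in field.message.DESCRIPTOR.fields]

print(sorted(nested)[:3])  # e.g. ['autoscaling_policy', 'creation_timestamp', 'description']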
@@ -3232,8 +3393,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3319,8 +3481,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3420,50 +3583,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3505,8 +3624,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3607,6 +3727,75 @@ def test_update_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionAutoscalerRequest.meta.fields[ + "autoscaler_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3640,8 +3829,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3749,8 +3939,9 @@ def test_update_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3850,50 +4041,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3935,8 +4082,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4037,6 +4185,75 @@ def test_update_unary_rest(request_type): "target": "target_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionAutoscalerRequest.meta.fields[ + "autoscaler_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autoscaler_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autoscaler_resource"][field])): + del request_init["autoscaler_resource"][field][i][subfield] + else: + del request_init["autoscaler_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4070,8 +4287,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4157,8 +4375,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4258,50 +4477,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["autoscaler_resource"] = { - "autoscaling_policy": { - "cool_down_period_sec": 2112, - "cpu_utilization": { - "predictive_method": "predictive_method_value", - "utilization_target": 0.19540000000000002, - }, - "custom_metric_utilizations": [ - { - "filter": "filter_value", - "metric": "metric_value", - "single_instance_assignment": 0.2766, - "utilization_target": 0.19540000000000002, - "utilization_target_type": "utilization_target_type_value", - } - ], - "load_balancing_utilization": {"utilization_target": 0.19540000000000002}, - "max_num_replicas": 1703, - "min_num_replicas": 1701, - "mode": "mode_value", - "scale_in_control": { - "max_scaled_in_replicas": { - "calculated": 1042, - "fixed": 528, - "percent": 753, - }, - "time_window_sec": 1600, - }, - "scaling_schedules": {}, - }, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "recommended_size": 1693, - "region": "region_value", - "scaling_schedule_status": {}, - "self_link": "self_link_value", - "status": "status_value", - "status_details": [{"message": "message_value", "type_": "type__value"}], - "target": "target_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
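The pb_return_value -> return_value rename that recurs throughout these hunks amounts to reusing one variable across the proto-plus -> protobuf -> JSON conversion. A minimal sketch of that response-mocking pattern, assuming google-cloud-compute and requests are available:

from google.cloud import compute_v1 as compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(client_operation_id="client_operation_id_value")

# Convert the proto-plus wrapper to its underlying protobuf message, then to JSON.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

# Fake the HTTP layer the same way the tests do.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")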
@@ -4343,8 +4518,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_backend_services.py b/tests/unit/gapic/compute_v1/test_region_backend_services.py index 6d29a575..a4464986 100644 --- a/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -638,8 +638,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -745,8 +746,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -888,8 +890,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -981,8 +984,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1066,8 +1070,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1209,8 +1214,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1305,8 +1311,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1411,8 +1418,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionBackendServiceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1556,8 +1564,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1617,6 +1626,81 @@ def test_get_health_rest(request_type): "backend_service": "sample3", } request_init["resource_group_reference_resource"] = {"group": "group_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.GetHealthRegionBackendServiceRequest.meta.fields[ + "resource_group_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_group_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["resource_group_reference_resource"][field]) + ): + del request_init["resource_group_reference_resource"][field][i][ + subfield + ] + else: + del request_init["resource_group_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
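As a concrete data point for the hunk above: ResourceGroupReference carries only the scalar field group, so the nested-field list should come out empty and nothing gets pruned. A hypothetical walk-through that mirrors (rather than replaces) the generated helper:

from google.cloud import compute_v1 as compute

test_field = compute.GetHealthRegionBackendServiceRequest.meta.fields[
    "resource_group_reference_resource"
]

def get_message_fields(field):
    # Same contract as the generated helper: composite fields yield their
    # nested fields, scalar fields yield nothing.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            return list(field.message.meta.fields.values())
        return list(field.message.DESCRIPTOR.fields)
    return []

runtime_nested_fields = [
    (field.name, nested.name)
    for field in get_message_fields(test_field)
    for nested in get_message_fields(field)
]
print(runtime_nested_fields)  # expected: [] -- "group" is a plain string field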
@@ -1629,8 +1713,9 @@ def test_get_health_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1714,8 +1799,9 @@ def test_get_health_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1817,7 +1903,6 @@ def test_get_health_rest_bad_request( "region": "sample2", "backend_service": "sample3", } - request_init["resource_group_reference_resource"] = {"group": "group_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1864,8 +1949,9 @@ def test_get_health_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1937,8 +2023,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2025,8 +2112,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2165,8 +2253,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2376,6 +2465,77 @@ 
def test_insert_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2409,8 +2569,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2513,8 +2674,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2609,161 +2771,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - 
"custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2805,8 +2812,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3018,6 +3026,77 @@ def test_insert_unary_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3051,8 +3130,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3133,8 +3213,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3229,161 +3310,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - 
"custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3425,8 +3351,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3498,8 +3425,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3591,8 +3519,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3734,8 +3663,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.BackendServiceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4003,6 +3933,77 @@ def test_patch_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4036,8 +4037,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4144,8 +4146,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4245,161 +4248,6 @@ def test_patch_rest_bad_request( "region": "sample2", "backend_service": "sample3", } - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", 
- ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4446,8 +4294,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4664,12 +4513,83 @@ def test_patch_unary_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } - request = request_type(**request_init) + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", description="description_value", @@ -4697,8 +4617,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4783,8 +4704,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4884,161 +4806,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "backend_service": "sample3", } - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - 
"minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5085,8 +4852,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5221,6 +4989,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionBackendServiceRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
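    # The version-skew pruning block above recurs verbatim in every request test of
    # this module. A minimal sketch of the same idea factored into a standalone
    # helper; the helper name and signature are illustrative only and are not part
    # of the generated tests:
    def prune_unknown_subfields(request_init, resource_key, request_cls):
        # Drop sample subfields that the runtime version of the proto no longer defines.

        def get_message_fields(field):
            # Nested fields of a message-typed field, or [] for scalar fields.
            if hasattr(field, "message") and field.message:
                if not hasattr(field.message, "DESCRIPTOR"):  # proto-plus message type
                    return list(field.message.meta.fields.values())
                return list(field.message.DESCRIPTOR.fields)  # raw protobuf message type
            return []

        top_field = request_cls.meta.fields[resource_key]
        known = {
            (field.name, nested.name)
            for field in get_message_fields(top_field)
            for nested in get_message_fields(field)
        }

        for field, value in request_init[resource_key].items():
            samples = value if isinstance(value, list) else [value]
            for sample in samples:
                if isinstance(sample, dict):
                    for subfield in list(sample):
                        if (field, subfield) not in known:
                            del sample[subfield]

    # e.g. prune_unknown_subfields(
    #     request_init,
    #     "region_set_policy_request_resource",
    #     compute.SetIamPolicyRegionBackendServiceRequest,
    # )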
@@ -5235,8 +5078,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5322,8 +5166,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5420,83 +5265,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5543,8 +5311,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5761,6 +5530,77 @@ def test_update_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
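    # The response mocks throughout these hunks rely on the proto-plus / protobuf
    # round trip shown above: `.pb()` unwraps the proto-plus message to its protobuf
    # form and `json_format.MessageToJson` serializes it for the fake REST body.
    # A minimal, self-contained sketch of that pattern (imports mirror this test
    # module's own; the Operation field value is illustrative):
    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format
    from requests import Response

    return_value = compute.Operation(client_operation_id="client_operation_id_value")
    # Rebinding `return_value` to the protobuf form (instead of keeping a separate
    # `pb_return_value` name) is safe because only the serialized JSON is used below.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)

    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")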
@@ -5794,8 +5634,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5902,8 +5743,9 @@ def test_update_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6003,161 +5845,6 @@ def test_update_rest_bad_request( "region": "sample2", "backend_service": "sample3", } - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - 
"custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6204,8 +5891,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6422,6 +6110,77 @@ def test_update_unary_rest(request_type): "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
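    # Every REST test in this module patches `requests.Session.request` on the
    # transport's session type and feeds the call under test a canned 200 response.
    # A condensed sketch of that protocol; the client class, credentials helper,
    # import paths, and the `update_unary` call are assumed to match this module's
    # existing imports and are not introduced by this change:
    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.compute_v1.services.region_backend_services import (
        RegionBackendServicesClient,
    )

    client = RegionBackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    canned = Response()
    canned.status_code = 200
    canned._content = json_format.MessageToJson(
        compute.Operation.pb(compute.Operation(name="name_value"))
    ).encode("UTF-8")
    with mock.patch.object(type(client.transport._session), "request") as req:
        req.return_value = canned
        response = client.update_unary(request=request)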
@@ -6455,8 +6214,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6541,8 +6301,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6642,161 +6403,6 @@ def test_update_unary_rest_bad_request( "region": "sample2", "backend_service": "sample3", } - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - 
"custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6843,8 +6449,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_commitments.py b/tests/unit/gapic/compute_v1/test_region_commitments.py index cba19838..258e23fe 100644 --- a/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,8 +840,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -987,8 +990,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Commitment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1085,8 +1089,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionCommitmentReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Commitment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1224,8 +1229,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Commitment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1356,6 +1362,75 @@ def test_insert_rest(request_type): "status_message": "status_message_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionCommitmentRequest.meta.fields[ + "commitment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["commitment_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["commitment_resource"][field])): + del request_init["commitment_resource"][field][i][subfield] + else: + del request_init["commitment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
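    # A tiny, self-contained demonstration of the deletion loop's mechanics above,
    # run on plain dicts (the subfield name here is made up to simulate a version
    # skew; with an up-to-date runtime dependency the loop is a no-op):
    sample = {"commitment_resource": {"reservations": [{"kept": 1, "stale": 2}]}}
    stale_subfields = [
        {"field": "reservations", "subfield": "stale", "is_repeated": True}
    ]
    for entry in stale_subfields:
        field, subfield = entry["field"], entry["subfield"]
        if entry["is_repeated"]:
            for item in sample["commitment_resource"][field]:
                del item[subfield]
        else:
            del sample["commitment_resource"][field][subfield]
    assert sample == {"commitment_resource": {"reservations": [{"kept": 1}]}}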
@@ -1389,8 +1464,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1493,8 +1569,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1589,82 +1666,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["commitment_resource"] = { - "auto_renew": True, - "category": "category_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "end_timestamp": "end_timestamp_value", - "id": 205, - "kind": "kind_value", - "license_resource": { - "amount": 660, - "cores_per_license": "cores_per_license_value", - "license_": "license__value", - }, - "merge_source_commitments": [ - "merge_source_commitments_value1", - "merge_source_commitments_value2", - ], - "name": "name_value", - "plan": "plan_value", - "region": "region_value", - "reservations": [ - { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [ - {"disk_size_gb": 1261, "interface": "interface_value"} - ], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } - ], - "resources": [ - { - "accelerator_type": "accelerator_type_value", - "amount": 660, - "type_": "type__value", - } - ], - "self_link": "self_link_value", - "split_source_commitment": "split_source_commitment_value", - "start_timestamp": "start_timestamp_value", - "status": "status_value", - "status_message": "status_message_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1704,8 +1705,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1836,6 +1838,75 @@ def test_insert_unary_rest(request_type): "status_message": "status_message_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionCommitmentRequest.meta.fields[ + "commitment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["commitment_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["commitment_resource"][field])): + del request_init["commitment_resource"][field][i][subfield] + else: + del request_init["commitment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
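    # Round-trip sanity sketch for the serialization used by the mocked responses:
    # JSON produced from the protobuf form can be parsed back into an equivalent
    # message (imports mirror this module's own; the Commitment field value is
    # illustrative):
    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format

    commitment = compute.Commitment(name="name_value")
    payload = json_format.MessageToJson(compute.Commitment.pb(commitment))
    parsed = compute.Commitment.pb(compute.Commitment())
    json_format.Parse(payload, parsed)
    assert parsed.name == "name_value"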
@@ -1869,8 +1940,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1951,8 +2023,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2047,82 +2120,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["commitment_resource"] = { - "auto_renew": True, - "category": "category_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "end_timestamp": "end_timestamp_value", - "id": 205, - "kind": "kind_value", - "license_resource": { - "amount": 660, - "cores_per_license": "cores_per_license_value", - "license_": "license__value", - }, - "merge_source_commitments": [ - "merge_source_commitments_value1", - "merge_source_commitments_value2", - ], - "name": "name_value", - "plan": "plan_value", - "region": "region_value", - "reservations": [ - { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [ - {"disk_size_gb": 1261, "interface": "interface_value"} - ], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } - ], - "resources": [ - { - "accelerator_type": "accelerator_type_value", - "amount": 660, - "type_": "type__value", - } - ], - "self_link": "self_link_value", - "split_source_commitment": "split_source_commitment_value", - "start_timestamp": "start_timestamp_value", - "status": "status_value", - "status_message": "status_message_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2162,8 +2159,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2233,8 +2231,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2324,8 +2323,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionCommitmentsReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2467,8 +2467,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.CommitmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.CommitmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2653,6 +2654,75 @@ def test_update_rest(request_type): "status_message": "status_message_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionCommitmentRequest.meta.fields[ + "commitment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["commitment_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["commitment_resource"][field])): + del request_init["commitment_resource"][field][i][subfield] + else: + del request_init["commitment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2686,8 +2756,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2800,8 +2871,9 @@ def test_update_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2903,82 +2975,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "commitment": "sample3"} - request_init["commitment_resource"] = { - "auto_renew": True, - "category": "category_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "end_timestamp": "end_timestamp_value", - "id": 205, - "kind": "kind_value", - "license_resource": { - "amount": 660, - "cores_per_license": "cores_per_license_value", - "license_": "license__value", - }, - "merge_source_commitments": [ - "merge_source_commitments_value1", - "merge_source_commitments_value2", - ], - "name": "name_value", - "plan": "plan_value", - "region": "region_value", - "reservations": [ - { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [ - {"disk_size_gb": 1261, "interface": "interface_value"} - ], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } - ], - "resources": [ - { - "accelerator_type": "accelerator_type_value", - "amount": 660, - "type_": "type__value", - } - ], - "self_link": "self_link_value", - "split_source_commitment": "split_source_commitment_value", - "start_timestamp": "start_timestamp_value", - "status": "status_value", - "status_message": "status_message_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3023,8 +3019,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3156,6 +3153,75 @@ def test_update_unary_rest(request_type): "status_message": "status_message_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionCommitmentRequest.meta.fields[ + "commitment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["commitment_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["commitment_resource"][field])): + del request_init["commitment_resource"][field][i][subfield] + else: + del request_init["commitment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
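The runtime_nested_fields comprehension added above flattens the request's message field into (field, subfield) name pairs that exist in the installed dependency; any key in the hand-written sample dict missing from that set gets pruned afterwards. A small self-contained illustration of the pair-building step, using hypothetical stand-in metadata objects instead of generated proto-plus types:

# Editorial sketch with invented FakeField objects standing in for field metadata.
class FakeField:
    def __init__(self, name, subfields=None):
        self.name = name
        self.subfields = subfields or []

def get_message_fields(field):
    # Simplified mirror of the helper in the diff: composite fields expose
    # their nested fields, scalars contribute nothing.
    return field.subfields

test_field = FakeField(
    "commitment_resource",
    subfields=[
        FakeField("license_resource",
                  subfields=[FakeField("amount"), FakeField("cores_per_license")]),
        FakeField("plan"),  # scalar: contributes no pairs
    ],
)

runtime_nested_fields = [
    (field.name, nested_field.name)
    for field in get_message_fields(test_field)
    for nested_field in get_message_fields(field)
]
print(runtime_nested_fields)
# [('license_resource', 'amount'), ('license_resource', 'cores_per_license')]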
@@ -3189,8 +3255,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3281,8 +3348,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3384,82 +3452,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "commitment": "sample3"} - request_init["commitment_resource"] = { - "auto_renew": True, - "category": "category_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "end_timestamp": "end_timestamp_value", - "id": 205, - "kind": "kind_value", - "license_resource": { - "amount": 660, - "cores_per_license": "cores_per_license_value", - "license_": "license__value", - }, - "merge_source_commitments": [ - "merge_source_commitments_value1", - "merge_source_commitments_value2", - ], - "name": "name_value", - "plan": "plan_value", - "region": "region_value", - "reservations": [ - { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [ - {"disk_size_gb": 1261, "interface": "interface_value"} - ], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } - ], - "resources": [ - { - "accelerator_type": "accelerator_type_value", - "amount": 660, - "type_": "type__value", - } - ], - "self_link": "self_link_value", - "split_source_commitment": "split_source_commitment_value", - "start_timestamp": "start_timestamp_value", - "status": "status_value", - "status_message": "status_message_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3504,8 +3496,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_disk_types.py b/tests/unit/gapic/compute_v1/test_region_disk_types.py index 3a7cc8f6..3301c54a 100644 --- a/tests/unit/gapic/compute_v1/test_region_disk_types.py +++ b/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -601,8 +601,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -692,8 +693,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionDiskTypeRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -831,8 +833,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -902,8 +905,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionDiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -993,8 +997,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionDiskTypesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionDiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1136,8 +1141,9 @@ def test_list_rest_flattened(): # Wrap the 
value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionDiskTypeList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_disks.py b/tests/unit/gapic/compute_v1/test_region_disks.py index 10600d2b..d18c6940 100644 --- a/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/tests/unit/gapic/compute_v1/test_region_disks.py @@ -567,6 +567,88 @@ def test_add_resource_policies_rest(request_type): request_init["region_disks_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesRegionDiskRequest.meta.fields[ + "region_disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 
0, + len( + request_init[ + "region_disks_add_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_add_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["region_disks_add_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -600,8 +682,9 @@ def test_add_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -708,8 +791,9 @@ def test_add_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -805,9 +889,6 @@ def test_add_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -850,8 +931,9 @@ def test_add_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -912,6 +994,88 @@ def test_add_resource_policies_unary_rest(request_type): request_init["region_disks_add_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesRegionDiskRequest.meta.fields[ + "region_disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_disks_add_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_add_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init["region_disks_add_resource_policies_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
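The pruning loop repeated in each of these tests walks the sample request_init dict one level deep and deletes every subfield the installed dependency does not define, removing it from each element when the field is repeated. A toy run of the same logic on plain dicts (the field names and the stale subfield are invented):

# Editorial sketch of the subfield-pruning logic on hand-made dicts.
request_init = {
    "resource": {
        "reservations": [{"name": "r1", "stale_subfield": 1},
                         {"name": "r2", "stale_subfield": 2}],
        "license_resource": {"amount": 660, "stale_subfield": 3},
        "plan": "plan_value",
    }
}
# Pretend only these (field, subfield) pairs exist at runtime.
runtime_nested_fields = [("reservations", "name"), ("license_resource", "amount")]

subfields_not_in_runtime = []
for field, value in request_init["resource"].items():
    result, is_repeated = None, False
    if isinstance(value, list) and len(value):
        is_repeated, result = True, value[0]
    if isinstance(value, dict):
        result = value
    if result and hasattr(result, "keys"):
        for subfield in result.keys():
            if (field, subfield) not in runtime_nested_fields:
                subfields_not_in_runtime.append(
                    {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                )

for entry in subfields_not_in_runtime:
    if entry["is_repeated"]:
        for item in request_init["resource"][entry["field"]]:
            del item[entry["subfield"]]
    else:
        del request_init["resource"][entry["field"]][entry["subfield"]]

print(request_init)
# {'resource': {'reservations': [{'name': 'r1'}, {'name': 'r2'}],
#               'license_resource': {'amount': 660}, 'plan': 'plan_value'}}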
@@ -945,8 +1109,9 @@ def test_add_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1031,8 +1196,9 @@ def test_add_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1128,9 +1294,6 @@ def test_add_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1173,8 +1336,9 @@ def test_add_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1235,6 +1399,81 @@ def test_bulk_insert_rest(request_type): request_init["bulk_insert_disk_resource_resource"] = { "source_consistency_group_policy": "source_consistency_group_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertRegionDiskRequest.meta.fields[ + "bulk_insert_disk_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_disk_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["bulk_insert_disk_resource_resource"][field]) + ): + del request_init["bulk_insert_disk_resource_resource"][field][i][ + subfield + ] + else: + del request_init["bulk_insert_disk_resource_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
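Each setup block ends by passing the pruned dict to the request constructor; proto-plus coerces nested dicts into the corresponding message fields, and an unknown key fails at construction time, which is what the pruning guards against. A short sketch using the same BulkInsertRegionDiskRequest shape as this test (the exception types caught below are a hedge, since the exact error can vary across protobuf/proto-plus versions):

# Editorial sketch. Assumes google-cloud-compute is installed.
from google.cloud import compute_v1 as compute

request_init = {
    "project": "sample1",
    "region": "sample2",
    "bulk_insert_disk_resource_resource": {
        "source_consistency_group_policy": "source_consistency_group_policy_value"
    },
}
request = compute.BulkInsertRegionDiskRequest(**request_init)
print(request.bulk_insert_disk_resource_resource.source_consistency_group_policy)

try:
    compute.BulkInsertRegionDiskRequest(
        bulk_insert_disk_resource_resource={"not_a_real_field": 1}
    )
except (TypeError, ValueError) as exc:
    # An unknown key fails fast at construction time; the exact exception
    # type depends on the installed protobuf/proto-plus versions.
    print(exc)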
@@ -1268,8 +1507,9 @@ def test_bulk_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1372,8 +1612,9 @@ def test_bulk_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1468,9 +1709,6 @@ def test_bulk_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1512,8 +1750,9 @@ def test_bulk_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1573,6 +1812,81 @@ def test_bulk_insert_unary_rest(request_type): request_init["bulk_insert_disk_resource_resource"] = { "source_consistency_group_policy": "source_consistency_group_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertRegionDiskRequest.meta.fields[ + "bulk_insert_disk_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_disk_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["bulk_insert_disk_resource_resource"][field]) + ): + del request_init["bulk_insert_disk_resource_resource"][field][i][ + subfield + ] + else: + del request_init["bulk_insert_disk_resource_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
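Serializing the protobuf form with json_format.MessageToJson is enough for these mocks because the REST transport parses the response body back into the declared response type. A minimal round-trip sketch, which only approximates that parsing step rather than reproducing the transport code:

# Editorial sketch. Assumes google-cloud-compute and protobuf are installed.
from google.cloud import compute_v1 as compute
from google.protobuf import json_format

original = compute.Operation(name="operation-123")  # illustrative value
as_json = json_format.MessageToJson(compute.Operation.pb(original))

# Parsing the JSON back into an empty protobuf Operation recovers the field,
# roughly what the REST transport does with the mocked response body.
parsed = json_format.Parse(as_json, compute.Operation.pb(compute.Operation()))
assert parsed.name == "operation-123"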
@@ -1606,8 +1920,9 @@ def test_bulk_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1688,8 +2003,9 @@ def test_bulk_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1784,9 +2100,6 @@ def test_bulk_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1828,8 +2141,9 @@ def test_bulk_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1923,6 +2237,75 @@ def test_create_snapshot_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateSnapshotRegionDiskRequest.meta.fields[ + "snapshot_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1956,8 +2339,9 @@ def test_create_snapshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2064,8 +2448,9 @@ def test_create_snapshot_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2161,43 +2546,6 @@ def test_create_snapshot_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
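The *_rest_bad_request hunks drop the large snapshot_resource sample body: exercising the error path only requires the mocked HTTP call to return a non-2xx status, so the request body contents are irrelevant there. A small sketch of how a canned 400 response maps onto the BadRequest error these tests fake (the error payload is made up):

# Editorial sketch. Assumes google-api-core and requests are installed.
import json
from google.api_core import exceptions as core_exceptions
from requests import Response

response_value = Response()
response_value.status_code = 400
response_value._content = json.dumps(
    {"error": {"code": 400, "message": "made-up bad request"}}
).encode("UTF-8")

# google.api_core maps HTTP status codes to exception classes; a 400
# response becomes BadRequest, the error class these tests fake.
exc = core_exceptions.from_http_response(response_value)
assert isinstance(exc, core_exceptions.BadRequest)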
@@ -2238,8 +2586,9 @@ def test_create_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2332,6 +2681,75 @@ def test_create_snapshot_unary_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateSnapshotRegionDiskRequest.meta.fields[ + "snapshot_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2365,8 +2783,9 @@ def test_create_snapshot_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2451,8 +2870,9 @@ def test_create_snapshot_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2548,43 +2968,6 @@ def test_create_snapshot_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2625,8 +3008,9 @@ def test_create_snapshot_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2715,8 +3099,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2820,8 +3205,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRegionDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2955,8 +3341,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3044,8 +3431,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3129,8 +3517,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3264,8 +3653,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3366,8 +3756,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3488,8 +3879,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionDiskRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3621,8 +4013,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Disk.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3691,8 +4084,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3779,8 +4173,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3918,8 +4313,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4034,6 +4430,73 @@ def test_insert_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
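The sample dicts in these hunks use keys such as "type_" and "license_" because the generated proto-plus classes append a trailing underscore to field names that would collide with Python builtins or keywords, while the serialized form keeps the original proto spelling. A quick check on one of the nested types used in disk_resource (values are illustrative):

# Editorial sketch. Assumes google-cloud-compute and protobuf are installed.
from google.cloud import compute_v1 as compute
from google.protobuf import json_format

feature = compute.GuestOsFeature(type_="type__value")
print(feature.type_)  # the Python attribute carries the trailing underscore

# Expected to serialize under the proto/JSON name without the underscore.
print(json_format.MessageToJson(compute.GuestOsFeature.pb(feature)))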
@@ -4067,8 +4530,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4174,8 +4638,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertRegionDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4275,66 +4740,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4374,8 +4779,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4490,6 +4896,73 @@ def test_insert_unary_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
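The recurring change from pb_return_value to reusing return_value in these hunks only renames the intermediate variable; the serialization path is unchanged. A standalone sketch of that path, assuming the imports the generated test module already relies on (json_format from google.protobuf and the compute types module):

from google.protobuf import json_format
from google.cloud.compute_v1.types import compute

return_value = compute.Operation(name="name_value")
# Convert the proto-plus wrapper to its underlying protobuf message, then
# serialize that message into the JSON body carried by the mocked Response.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
content = json_return_value.encode("UTF-8")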
@@ -4523,8 +4996,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4610,8 +5084,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4711,66 +5186,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - } request = request_type(**request_init) # 
Mock the http request call within the method and fake a BadRequest error. @@ -4810,8 +5225,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4881,8 +5297,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4972,8 +5389,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionDisksRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5111,8 +5529,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DiskList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5224,6 +5643,88 @@ def test_remove_resource_policies_rest(request_type): request_init["region_disks_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesRegionDiskRequest.meta.fields[ + "region_disks_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
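The bookkeeping in these blocks reduces to a membership test on (field, subfield) pairs. A small sketch with plain dictionaries, reusing field names from the disk sample above plus one invented key, of how an extra key in the sample request ends up in subfields_not_in_runtime:

# Nested field names the installed library still defines (two levels only).
runtime_nested_fields = [("disk_encryption_key", "kms_key_name")]

sample = {
    "name": "name_value",
    "disk_encryption_key": {"kms_key_name": "key", "brand_new_subfield": "x"},
}
subfields_not_in_runtime = [
    {"field": field, "subfield": subfield, "is_repeated": False}
    for field, value in sample.items()
    if isinstance(value, dict)
    for subfield in value
    if (field, subfield) not in runtime_nested_fields
]
# -> [{'field': 'disk_encryption_key', 'subfield': 'brand_new_subfield', 'is_repeated': False}]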
@@ -5257,8 +5758,9 @@ def test_remove_resource_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5365,8 +5867,9 @@ def test_remove_resource_policies_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5463,9 +5966,6 @@ def test_remove_resource_policies_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5508,8 +6008,9 @@ def test_remove_resource_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5570,6 +6071,88 @@ def test_remove_resource_policies_unary_rest(request_type): request_init["region_disks_remove_resource_policies_request_resource"] = { "resource_policies": ["resource_policies_value1", "resource_policies_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveResourcePoliciesRegionDiskRequest.meta.fields[ + "region_disks_remove_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_remove_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_disks_remove_resource_policies_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
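The deletion pass at the end of each block comes in two shapes: for a repeated message field the stale key has to be stripped from every list element, for a singular message field from a single dict. A condensed sketch with placeholder data (same effect as the index-based loop in the generated code):

request_init = {"resource": {
    "guest_os_features": [{"type_": "a", "stale": 1}, {"type_": "b", "stale": 2}],
    "disk_encryption_key": {"raw_key": "k", "stale": 3},
}}
subfields_not_in_runtime = [
    {"field": "guest_os_features", "subfield": "stale", "is_repeated": True},
    {"field": "disk_encryption_key", "subfield": "stale", "is_repeated": False},
]
for subfield_to_delete in subfields_not_in_runtime:
    field = subfield_to_delete["field"]
    subfield = subfield_to_delete["subfield"]
    if subfield_to_delete["is_repeated"]:
        for item in request_init["resource"][field]:  # every element of the repeated field
            del item[subfield]
    else:
        del request_init["resource"][field][subfield]
# request_init is now limited to keys the runtime library can deserialize.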
@@ -5603,8 +6186,9 @@ def test_remove_resource_policies_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5689,8 +6273,9 @@ def test_remove_resource_policies_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5787,9 +6372,6 @@ def test_remove_resource_policies_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_remove_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5832,8 +6414,9 @@ def test_remove_resource_policies_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5892,6 +6475,83 @@ def test_resize_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["region_disks_resize_request_resource"] = {"size_gb": 739} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeRegionDiskRequest.meta.fields[ + "region_disks_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_disks_resize_request_resource"][field]) + ): + del request_init["region_disks_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_disks_resize_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5925,8 +6585,9 @@ def test_resize_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6031,8 +6692,9 @@ def test_resize_rest_required_fields(request_type=compute.ResizeRegionDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6128,7 +6790,6 @@ def test_resize_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_resize_request_resource"] = {"size_gb": 739} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6171,8 +6832,9 @@ def test_resize_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6231,6 +6893,83 @@ def test_resize_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["region_disks_resize_request_resource"] = {"size_gb": 739} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeRegionDiskRequest.meta.fields[ + "region_disks_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_disks_resize_request_resource"][field]) + ): + del request_init["region_disks_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_disks_resize_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6264,8 +7003,9 @@ def test_resize_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6350,8 +7090,9 @@ def test_resize_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6447,7 +7188,6 @@ def test_resize_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_resize_request_resource"] = {"size_gb": 739} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6490,8 +7230,9 @@ def test_resize_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6626,6 +7367,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionDiskRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
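One property of this pruning worth keeping in mind for the set_iam_policy sample: the comparison reaches exactly two levels, the resource's own keys and their immediate subkeys, so deeper keys inside the policy are never checked. A short sketch with a truncated copy of the request body above (the isinstance check stands in for the hasattr(result, "keys") test in the generated code):

region_set_policy_request_resource = {
    "bindings": [{"binding_id": "binding_id_value", "condition": {"title": "title_value"}}],
    "etag": "etag_value",
    "policy": {"audit_configs": [{"log_type": "log_type_value"}], "version": 774},
}
checked = []
for field, value in region_set_policy_request_resource.items():
    result = value[0] if isinstance(value, list) and len(value) else value
    if isinstance(result, dict):
        checked.extend((field, subfield) for subfield in result)
# checked == [('bindings', 'binding_id'), ('bindings', 'condition'),
#             ('policy', 'audit_configs'), ('policy', 'version')]
# Deeper pairs such as ('condition', 'title') or ('audit_configs', 'log_type')
# are never examined.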
@@ -6640,8 +7456,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6727,8 +7544,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6824,83 +7642,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
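The bad_request tests in these hunks now build the request from routing fields only, which works because a proto-plus request accepts a partial dict and leaves the nested resource at its default. A tiny sketch using the same request type as above (the assertion reflects standard proto-plus default behaviour, not anything specific to this change):

from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"}
request = compute.SetIamPolicyRegionDiskRequest(**request_init)
# The body field was never set, so it reads back as an empty message.
assert not request.region_set_policy_request_resource.bindings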
@@ -6947,8 +7688,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7010,6 +7752,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsRegionDiskRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7043,8 +7860,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7151,8 +7969,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7248,10 +8067,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7298,8 +8113,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7361,6 +8177,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsRegionDiskRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7394,8 +8285,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7480,8 +8372,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7577,10 +8470,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7627,8 +8516,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7689,6 +8579,88 @@ def test_start_async_replication_rest(request_type): request_init["region_disks_start_async_replication_request_resource"] = { "async_secondary_disk": "async_secondary_disk_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartAsyncReplicationRegionDiskRequest.meta.fields[ + "region_disks_start_async_replication_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_start_async_replication_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_disks_start_async_replication_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_start_async_replication_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_disks_start_async_replication_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7722,8 +8694,9 @@ def test_start_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7830,8 +8803,9 @@ def test_start_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7927,9 +8901,6 @@ def test_start_async_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_start_async_replication_request_resource"] = { - "async_secondary_disk": "async_secondary_disk_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7972,8 +8943,9 @@ def test_start_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8034,6 +9006,88 @@ def test_start_async_replication_unary_rest(request_type): request_init["region_disks_start_async_replication_request_resource"] = { "async_secondary_disk": "async_secondary_disk_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StartAsyncReplicationRegionDiskRequest.meta.fields[ + "region_disks_start_async_replication_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_start_async_replication_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_disks_start_async_replication_request_resource" + ][field] + ), + ): + del request_init[ + "region_disks_start_async_replication_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_disks_start_async_replication_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
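For a concrete sense of what test_field is in the start_async_replication blocks above: it is the proto-plus field descriptor for the nested request resource, and its .message attribute is the nested message class, which is what lets get_message_fields read one more level through meta.fields. A short sketch (the expected field name comes from the sample request above; treat the exact output as an assumption):

from google.cloud.compute_v1.types import compute

test_field = compute.StartAsyncReplicationRegionDiskRequest.meta.fields[
    "region_disks_start_async_replication_request_resource"
]
nested_cls = test_field.message  # the proto-plus class of the nested resource
print(list(nested_cls.meta.fields))  # expected to include 'async_secondary_disk'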
@@ -8067,8 +9121,9 @@ def test_start_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8153,8 +9208,9 @@ def test_start_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8250,9 +9306,6 @@ def test_start_async_replication_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_start_async_replication_request_resource"] = { - "async_secondary_disk": "async_secondary_disk_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8295,8 +9348,9 @@ def test_start_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8387,8 +9441,9 @@ def test_stop_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8494,8 +9549,9 @@ def test_stop_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8629,8 +9685,9 @@ def test_stop_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8718,8 +9775,9 @@ def test_stop_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8803,8 +9861,9 @@ def test_stop_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8938,8 +9997,9 @@ def test_stop_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8997,6 +10057,88 @@ def test_stop_group_async_replication_rest(request_type): request_init["disks_stop_group_async_replication_resource_resource"] = { "resource_policy": "resource_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StopGroupAsyncReplicationRegionDiskRequest.meta.fields[ + "disks_stop_group_async_replication_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_stop_group_async_replication_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field] + ), + ): + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][i][subfield] + else: + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
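Editor's note: the `get_message_fields` helper that keeps reappearing in these tests relies on a single signal to tell a proto-plus wrapper from a raw `*_pb2` message: raw protobuf classes expose a `DESCRIPTOR`, while proto-plus wrappers publish their fields through `.meta.fields`. A hedged sketch of that branch in isolation, assuming the test module's usual `compute` import for the proto-plus side and `google.protobuf.timestamp_pb2` as a convenient plain protobuf message:

    from google.cloud.compute_v1.types import compute
    from google.protobuf import timestamp_pb2

    def field_names(message_cls):
        # Mirrors the proto-plus vs protobuf branch of get_message_fields above.
        if hasattr(message_cls, "DESCRIPTOR"):              # raw *_pb2 message type
            return [f.name for f in message_cls.DESCRIPTOR.fields]
        return list(message_cls.meta.fields.keys())         # proto-plus wrapper

    print(field_names(timestamp_pb2.Timestamp))   # ['seconds', 'nanos']
    print(field_names(compute.Operation)[:3])     # a few Operation field names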
@@ -9030,8 +10172,9 @@ def test_stop_group_async_replication_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9134,8 +10277,9 @@ def test_stop_group_async_replication_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9231,9 +10375,6 @@ def test_stop_group_async_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["disks_stop_group_async_replication_resource_resource"] = { - "resource_policy": "resource_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9275,8 +10416,9 @@ def test_stop_group_async_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9336,6 +10478,88 @@ def test_stop_group_async_replication_unary_rest(request_type): request_init["disks_stop_group_async_replication_resource_resource"] = { "resource_policy": "resource_policy_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.StopGroupAsyncReplicationRegionDiskRequest.meta.fields[ + "disks_stop_group_async_replication_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_stop_group_async_replication_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field] + ), + ): + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][i][subfield] + else: + del request_init[ + "disks_stop_group_async_replication_resource_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
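Editor's note: what the pruning protects is the `request = request_type(**request_init)` call that follows each of these blocks. Proto-plus builds the nested resource message from the plain dict, and it can only do that for keys the installed version of `compute` actually defines. A small sketch of that construction with the same request type and sample value used in the block above (no pruning is needed here because the key is known):

    from google.cloud.compute_v1.types import compute

    request_init = {"project": "sample1", "region": "sample2"}
    request_init["disks_stop_group_async_replication_resource_resource"] = {
        "resource_policy": "resource_policy_value"
    }

    # Proto-plus converts the nested dict into the resource message for us.
    request = compute.StopGroupAsyncReplicationRegionDiskRequest(**request_init)
    print(request.disks_stop_group_async_replication_resource_resource.resource_policy)
    # -> "resource_policy_value"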
@@ -9369,8 +10593,9 @@ def test_stop_group_async_replication_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9451,8 +10676,9 @@ def test_stop_group_async_replication_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9548,9 +10774,6 @@ def test_stop_group_async_replication_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["disks_stop_group_async_replication_resource_resource"] = { - "resource_policy": "resource_policy_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9592,8 +10815,9 @@ def test_stop_group_async_replication_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9655,6 +10879,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsRegionDiskRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
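Editor's note: the mechanical change repeated through every hunk here (`return_value = compute.X.pb(return_value)` replacing the old `pb_return_value` temporary) exists because `json_format.MessageToJson` only understands raw protobuf messages, not proto-plus wrappers. A minimal sketch of that unwrap-and-serialize step, assuming the same `compute` and `json_format` imports the tests already use:

    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format

    return_value = compute.TestPermissionsResponse(permissions=["permissions_value1"])

    # Unwrap the proto-plus wrapper into its underlying protobuf message,
    # then serialize it the way the mocked REST responses in this file do.
    return_value = compute.TestPermissionsResponse.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)
    content = json_return_value.encode("UTF-8")   # payload handed to the fake Response
    print(json_return_value)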
@@ -9667,8 +10966,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9752,8 +11052,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9851,9 +11152,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9900,8 +11198,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10019,6 +11318,73 @@ def test_update_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
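Editor's note: `runtime_nested_fields` is the list of `(field, subfield)` pairs the installed library knows about, built by walking one level of fields under the request's resource field and then one level under each of those. A hedged sketch of that traversal for `compute.UpdateRegionDiskRequest` (the request type used in the block above), with the helper re-declared so the snippet stands alone:

    from google.cloud.compute_v1.types import compute

    def get_message_fields(field):
        # Same shape as the helper repeated in these tests.
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):          # proto-plus type
                return list(field.message.meta.fields.values())
            return field.message.DESCRIPTOR.fields                # raw *_pb2 type
        return []

    test_field = compute.UpdateRegionDiskRequest.meta.fields["disk_resource"]
    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]
    # Pairs such as ("disk_encryption_key", "kms_key_name") show up here when the
    # installed version of the library defines that subfield.
    print(len(runtime_nested_fields))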
@@ -10052,8 +11418,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10164,8 +11531,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateRegionDiskReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10267,66 +11635,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": 
"zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10367,8 +11675,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10484,6 +11793,73 @@ def test_update_unary_rest(request_type): "users": ["users_value1", "users_value2"], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionDiskRequest.meta.fields["disk_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["disk_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["disk_resource"][field])): + del request_init["disk_resource"][field][i][subfield] + else: + del request_init["disk_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. @@ -10517,8 +11893,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10609,8 +11986,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10712,66 +12090,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["disk_resource"] = { - "architecture": "architecture_value", - "async_primary_disk": { - "consistency_group_policy": "consistency_group_policy_value", - "consistency_group_policy_id": "consistency_group_policy_id_value", - "disk": "disk_value", - "disk_id": "disk_id_value", - }, - "async_secondary_disks": {}, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "guest_os_features": [{"type_": "type__value"}], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "last_attach_timestamp": "last_attach_timestamp_value", - "last_detach_timestamp": "last_detach_timestamp_value", - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "options": "options_value", - "params": {"resource_manager_tags": {}}, - "physical_block_size_bytes": 2663, - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "region": "region_value", - "replica_zones": ["replica_zones_value1", "replica_zones_value2"], - "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": { - "async_primary_disk": {"state": "state_value"}, - "async_secondary_disks": {}, - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "size_gb": 739, - "source_consistency_group_policy": "source_consistency_group_policy_value", - "source_consistency_group_policy_id": "source_consistency_group_policy_id_value", - "source_disk": "source_disk_value", - "source_disk_id": "source_disk_id_value", - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_image_id": "source_image_id_value", - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - "source_snapshot_id": "source_snapshot_id_value", - "source_storage_object": "source_storage_object_value", - "status": "status_value", - "type_": "type__value", - "users": ["users_value1", "users_value2"], - "zone": "zone_value", - } request 
= request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10812,8 +12130,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/tests/unit/gapic/compute_v1/test_region_health_check_services.py index affcda70..1b8343f4 100644 --- a/tests/unit/gapic/compute_v1/test_region_health_check_services.py +++ b/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -646,8 +646,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -753,8 +754,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -896,8 +898,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -989,8 +992,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1074,8 +1078,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1217,8 +1222,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1300,8 +1306,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1398,8 +1405,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1543,8 +1551,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckService.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1619,6 +1628,81 @@ def test_insert_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionHealthCheckServiceRequest.meta.fields[ + "health_check_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["health_check_service_resource"][field]) + ): + del request_init["health_check_service_resource"][field][i][ + subfield + ] + else: + del request_init["health_check_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
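Editor's note: none of these tests touch the network. The session's `request` method is patched and handed a hand-built `requests.Response`, with the JSON payload stuffed into the private `_content` attribute exactly as the hunks above do. A hedged, standalone sketch of that stubbing pattern (the URL is hypothetical, and `_content` is not public `requests` API; it simply mirrors the generated tests):

    from unittest import mock
    from requests import Response, Session

    response_value = Response()
    response_value.status_code = 200
    response_value._content = b'{"name": "operation-1"}'   # mirrors the tests; private attribute
    response_value.headers["Content-Type"] = "application/json"

    with mock.patch.object(Session, "request", return_value=response_value) as req:
        session = Session()
        resp = session.request("GET", "https://example.com/fake")   # hypothetical URL
        assert resp.json()["name"] == "operation-1"
        req.assert_called_once()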
@@ -1652,8 +1736,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1756,8 +1841,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1852,26 +1938,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["health_check_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "health_status_aggregation_policy": "health_status_aggregation_policy_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network_endpoint_groups": [ - "network_endpoint_groups_value1", - "network_endpoint_groups_value2", - ], - "notification_endpoints": [ - "notification_endpoints_value1", - "notification_endpoints_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1913,8 +1979,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1991,6 +2058,81 @@ def test_insert_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionHealthCheckServiceRequest.meta.fields[ + "health_check_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["health_check_service_resource"][field]) + ): + del request_init["health_check_service_resource"][field][i][ + subfield + ] + else: + del request_init["health_check_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2024,8 +2166,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2106,8 +2249,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2202,26 +2346,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["health_check_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "health_status_aggregation_policy": "health_status_aggregation_policy_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network_endpoint_groups": [ - "network_endpoint_groups_value1", - "network_endpoint_groups_value2", - ], - "notification_endpoints": [ - "notification_endpoints_value1", - "notification_endpoints_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
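Editor's note: the `*_rest_bad_request` tests stub a 400 response the same way and expect it to surface as a `google.api_core` exception; the hunks above also drop the resource body from `request_init`, since building a full resource is unnecessary for a request that is only meant to fail. A hedged sketch of how a 400 `requests.Response` maps onto `BadRequest` via `google.api_core.exceptions.from_http_response` (the exact error message format depends on the installed google-api-core):

    from unittest import mock
    from google.api_core import exceptions as core_exceptions
    from requests import Response

    response_value = Response()
    response_value.status_code = 400
    response_value._content = b'{"error": {"message": "bad request"}}'
    # from_http_response may read the originating request when building its message text.
    response_value.request = mock.Mock(method="POST", url="https://example.com/fake")

    exc = core_exceptions.from_http_response(response_value)
    print(type(exc).__name__)   # BadRequest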
@@ -2263,8 +2387,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2336,8 +2461,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckServicesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2429,8 +2555,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckServicesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2572,8 +2699,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckServicesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2706,6 +2834,81 @@ def test_patch_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionHealthCheckServiceRequest.meta.fields[ + "health_check_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["health_check_service_resource"][field]) + ): + del request_init["health_check_service_resource"][field][i][ + subfield + ] + else: + del request_init["health_check_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2739,8 +2942,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2847,8 +3051,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2948,26 +3153,6 @@ def test_patch_rest_bad_request( "region": "sample2", "health_check_service": "sample3", } - request_init["health_check_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "health_status_aggregation_policy": "health_status_aggregation_policy_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network_endpoint_groups": [ - "network_endpoint_groups_value1", - "network_endpoint_groups_value2", - ], - "notification_endpoints": [ - "notification_endpoints_value1", - "notification_endpoints_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3014,8 +3199,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3097,6 +3283,81 @@ def test_patch_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionHealthCheckServiceRequest.meta.fields[ + "health_check_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["health_check_service_resource"][field]) + ): + del request_init["health_check_service_resource"][field][i][ + subfield + ] + else: + del request_init["health_check_service_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3130,8 +3391,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3216,8 +3478,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3317,26 +3580,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "health_check_service": "sample3", } - request_init["health_check_service_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "health_status_aggregation_policy": "health_status_aggregation_policy_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network_endpoint_groups": [ - "network_endpoint_groups_value1", - "network_endpoint_groups_value2", - ], - "notification_endpoints": [ - "notification_endpoints_value1", - "notification_endpoints_value2", - ], - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
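Every response-mocking hunk in this diff makes the same mechanical edit: the proto-plus return value is unwrapped to its underlying protobuf message with <Type>.pb(...) before being passed to json_format.MessageToJson, and the intermediate pb_return_value name is dropped in favour of rebinding return_value. A minimal sketch of what that mock setup produces, using the same Response and json_format objects these tests already rely on (the Operation field value is a placeholder):

# Illustrative sketch only; mirrors the mock wiring shown in the hunks above.
from google.protobuf import json_format
from requests import Response
from google.cloud.compute_v1.types import compute  # same types module these tests import (assumed)

return_value = compute.Operation(name="name_value")  # proto-plus wrapper object

response_value = Response()
response_value.status_code = 200
# json_format.MessageToJson only accepts raw protobuf messages, so unwrap the
# proto-plus object first; rebinding return_value keeps a single name in play.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
# A patched transport (req.return_value = response_value in the tests) then serves
# this canned payload to the client call under test.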
@@ -3383,8 +3626,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_health_checks.py b/tests/unit/gapic/compute_v1/test_region_health_checks.py index b65c55db..d583365c 100644 --- a/tests/unit/gapic/compute_v1/test_region_health_checks.py +++ b/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -630,8 +630,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -737,8 +738,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -880,8 +882,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -973,8 +976,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1058,8 +1062,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1201,8 +1206,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1284,8 +1290,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1377,8 +1384,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionHealthCheckReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1520,8 +1528,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheck.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1640,6 +1649,77 @@ def test_insert_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
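get_message_fields, repeated verbatim inside each of these tests, only has to handle two shapes of field descriptor: a proto-plus composite field exposes its nested message's fields through .meta.fields, while a raw protobuf (*_pb2) message class exposes them through .DESCRIPTOR.fields; scalar fields have nothing nested to report. A compact, logically equivalent illustration of that branch, exercised against one of the compute request types used above:

# Illustrative sketch only; equivalent to the helper defined in the tests.
from google.cloud.compute_v1.types import compute  # same types module these tests import (assumed)

def get_message_fields(field):
    """Return the nested fields of a composite field, or [] for scalar fields."""
    if not (hasattr(field, "message") and field.message):
        return []  # scalar: nothing nested to inspect
    if hasattr(field.message, "DESCRIPTOR"):
        # Plain protobuf class generated into a *_pb2 module.
        return list(field.message.DESCRIPTOR.fields)
    # proto-plus wrapper class: field metadata lives on .meta.
    return list(field.message.meta.fields.values())

resource_field = compute.InsertRegionHealthCheckRequest.meta.fields["health_check_resource"]
nested_names = [f.name for f in get_message_fields(resource_field)]
# nested_names now lists the HealthCheck fields the installed library knows about.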
@@ -1673,8 +1753,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1777,8 +1858,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1873,70 +1955,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1976,8 +1994,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2096,6 +2115,77 @@ def test_insert_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2129,8 +2219,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2211,8 +2302,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2307,70 +2399,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2410,8 +2438,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2481,8 +2510,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2572,8 +2602,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionHealthChecksRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2715,8 +2746,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.HealthCheckList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2893,6 +2925,77 @@ def test_patch_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2926,8 +3029,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3032,8 +3136,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchRegionHealthCheckR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3133,70 +3238,6 @@ def test_patch_rest_bad_request( "region": "sample2", "health_check": "sample3", } - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3241,8 +3282,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3366,6 +3408,77 @@ def test_patch_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3399,8 +3512,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3485,8 +3599,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3586,70 +3701,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "health_check": "sample3", } - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3694,8 +3745,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3819,6 +3871,77 @@ def test_update_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3852,8 +3975,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3960,8 +4084,9 @@ def test_update_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4061,70 +4186,6 @@ def test_update_rest_bad_request( "region": "sample2", "health_check": "sample3", } - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4169,8 +4230,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4294,6 +4356,77 @@ def test_update_unary_rest(request_type): "type_": "type__value", "unhealthy_threshold": 2046, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionHealthCheckRequest.meta.fields[ + "health_check_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "health_check_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["health_check_resource"][field])): + del request_init["health_check_resource"][field][i][subfield] + else: + del request_init["health_check_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4327,8 +4460,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4413,8 +4547,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4514,70 +4649,6 @@ def test_update_unary_rest_bad_request( "region": "sample2", "health_check": "sample3", } - request_init["health_check_resource"] = { - "check_interval_sec": 1884, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_health_check": { - "grpc_service_name": "grpc_service_name_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - }, - "healthy_threshold": 1819, - "http2_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "http_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "https_health_check": { - "host": "host_value", - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request_path": "request_path_value", - "response": "response_value", - }, - "id": 205, - "kind": "kind_value", - "log_config": {"enable": True}, - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "ssl_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "tcp_health_check": { - "port": 453, - "port_name": "port_name_value", - "port_specification": "port_specification_value", - "proxy_header": "proxy_header_value", - "request": "request_value", - "response": "response_value", - }, - "timeout_sec": 1185, - "type_": "type__value", - "unhealthy_threshold": 2046, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4622,8 +4693,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index 8a156f00..bd298a46 100644 --- a/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -616,6 +616,88 @@ def test_abandon_instances_rest(request_type): request_init[ "region_instance_group_managers_abandon_instances_request_resource" ] = {"instances": ["instances_value1", "instances_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AbandonInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_abandon_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + 
field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -649,8 +731,9 @@ def test_abandon_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -757,8 +840,9 @@ def test_abandon_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -859,9 +943,6 @@ def test_abandon_instances_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_managers_abandon_instances_request_resource" - ] = {"instances": ["instances_value1", "instances_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -908,8 +989,9 @@ def test_abandon_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -974,6 +1056,88 @@ def test_abandon_instances_unary_rest(request_type): request_init[ "region_instance_group_managers_abandon_instances_request_resource" ] = {"instances": ["instances_value1", "instances_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AbandonInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_abandon_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
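The instances value in the sample above is a repeated scalar (a plain list of strings), so the hasattr(result, "keys") guard in the shared block skips it. For repeated message fields, the same loop inspects the first element's keys and then, via the is_repeated branch, deletes the stale subfield from every element. A toy, self-contained illustration of that branch with a made-up request_init shape:

# Illustrative sketch only; shows the repeated-message branch of the trimming loop.
request_init = {
    "resource": {
        "items": [  # repeated nested message, represented as a list of dicts
            {"name": "a", "stale_subfield": 1},
            {"name": "b", "stale_subfield": 2},
        ]
    }
}
runtime_nested_fields = [("items", "name")]  # "stale_subfield" is unknown at runtime

for field, value in request_init["resource"].items():
    if isinstance(value, list) and value and isinstance(value[0], dict):
        for subfield in list(value[0].keys()):
            if (field, subfield) not in runtime_nested_fields:
                # Remove the unknown subfield from every repeated element,
                # not just the first one that was inspected.
                for element in value:
                    element.pop(subfield, None)

assert request_init["resource"]["items"] == [{"name": "a"}, {"name": "b"}]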
@@ -1007,8 +1171,9 @@ def test_abandon_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1093,8 +1258,9 @@ def test_abandon_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1195,9 +1361,6 @@ def test_abandon_instances_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_managers_abandon_instances_request_resource" - ] = {"instances": ["instances_value1", "instances_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1244,8 +1407,9 @@ def test_abandon_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1313,6 +1477,90 @@ def test_apply_updates_to_instances_rest(request_type): "minimal_action": "minimal_action_value", "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_apply_updates_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
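The block added above exists because the sample request_init dict is frozen at generation time, while the protobuf runtime installed when the tests run may predate some of those fields (see the linked gapic-generator-python issue 1748). A compressed sketch of the same pruning idea against a plain protobuf message; duration_pb2.Duration is only a stand-in for the compute resource messages:

from google.protobuf import duration_pb2

# Field names the message actually has in the installed protobuf runtime.
runtime_fields = {f.name for f in duration_pb2.Duration.DESCRIPTOR.fields}

# A sample request that may mention fields newer than the runtime schema.
sample = {"seconds": 5, "nanos": 100, "field_added_later": True}

# Keep only what the runtime knows about, mirroring the deletion loops above.
pruned = {k: v for k, v in sample.items() if k in runtime_fields}
print(pruned)  # {'seconds': 5, 'nanos': 100}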
@@ -1346,8 +1594,9 @@ def test_apply_updates_to_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1452,8 +1701,9 @@ def test_apply_updates_to_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1558,12 +1808,6 @@ def test_apply_updates_to_instances_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_apply_updates_request_resource"] = { - "all_instances": True, - "instances": ["instances_value1", "instances_value2"], - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1610,8 +1854,9 @@ def test_apply_updates_to_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1679,6 +1924,90 @@ def test_apply_updates_to_instances_unary_rest(request_type): "minimal_action": "minimal_action_value", "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_apply_updates_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
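get_message_fields, repeated verbatim in each of these tests, has to cope with two field flavors: a nested proto-plus type keeps its schema under .meta.fields, while a generated *_pb2 type exposes DESCRIPTOR.fields, and hasattr(field.message, "DESCRIPTOR") is how the code tells them apart. A small sketch of that dispatch on throwaway proto-plus messages (Inner and Outer are invented names, not compute types; the DESCRIPTOR branch is kept but, as in the generated code, may not be exercised):

import proto


class Inner(proto.Message):
    value = proto.Field(proto.STRING, number=1)


class Outer(proto.Message):
    inner = proto.Field(Inner, number=1)          # composite (message) field
    label = proto.Field(proto.STRING, number=2)   # scalar field


def nested_field_names(field):
    # Mirror of get_message_fields: scalar fields contribute nothing.
    if not (hasattr(field, "message") and field.message):
        return []
    if hasattr(field.message, "DESCRIPTOR"):      # raw *_pb2 message type
        return [f.name for f in field.message.DESCRIPTOR.fields]
    return list(field.message.meta.fields)        # proto-plus message type


for name, field in Outer.meta.fields.items():
    print(name, nested_field_names(field))
# inner ['value']
# label []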
@@ -1712,8 +2041,9 @@ def test_apply_updates_to_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1796,8 +2126,9 @@ def test_apply_updates_to_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1902,12 +2233,6 @@ def test_apply_updates_to_instances_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_apply_updates_request_resource"] = { - "all_instances": True, - "instances": ["instances_value1", "instances_value2"], - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1954,8 +2279,9 @@ def test_apply_updates_to_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2027,6 +2353,88 @@ def test_create_instances_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_create_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_create_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
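The deletion loop distinguishes repeated fields from singular message fields: for a repeated field such as instances in the create-instances request, the stale subfield has to be removed from every element of the list rather than from the dict once. A dict-only sketch of that branch, simplified to pop() instead of index-based del (no compute types involved):

# Stand-in for request_init["..._create_instances_request_resource"].
resource = {
    "instances": [
        {"name": "vm-1", "field_added_later": 1},
        {"name": "vm-2", "field_added_later": 2},
    ]
}

stale = [{"field": "instances", "subfield": "field_added_later", "is_repeated": True}]

for item in stale:
    field, subfield = item["field"], item["subfield"]
    if item["is_repeated"]:
        # Repeated field: drop the subfield from every element.
        for element in resource[field]:
            element.pop(subfield, None)
    else:
        # Singular message field: drop the subfield once.
        resource[field].pop(subfield, None)

print(resource)  # {'instances': [{'name': 'vm-1'}, {'name': 'vm-2'}]}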
@@ -2060,8 +2468,9 @@ def test_create_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2168,8 +2577,9 @@ def test_create_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2270,16 +2680,6 @@ def test_create_instances_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_create_instances_request_resource"] = { - "instances": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2326,8 +2726,9 @@ def test_create_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2399,6 +2800,88 @@ def test_create_instances_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.CreateInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_create_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_create_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_create_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
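After pruning, each test builds the request with request_type(**request_init); proto-plus constructors coerce nested dicts and lists into message fields and reject keys the runtime schema does not know, which is the concrete failure the pruning prevents. A throwaway illustration (Disk and Instance are invented for the sketch, and the unknown-key rejection is assumed to surface as ValueError, as it does in current proto-plus releases):

import proto


class Disk(proto.Message):
    name = proto.Field(proto.STRING, number=1)


class Instance(proto.Message):
    name = proto.Field(proto.STRING, number=1)
    disks = proto.RepeatedField(Disk, number=2)


# Nested dicts/lists are coerced into the corresponding message fields.
inst = Instance(**{"name": "vm-1", "disks": [{"name": "boot"}]})
print(inst.disks[0].name)  # boot

# A key the runtime schema lacks is rejected, so stale sample fields must go.
try:
    Instance(**{"name": "vm-1", "field_added_later": True})
except ValueError as exc:
    print("rejected:", exc)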
@@ -2432,8 +2915,9 @@ def test_create_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2518,8 +3002,9 @@ def test_create_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2620,16 +3105,6 @@ def test_create_instances_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_create_instances_request_resource"] = { - "instances": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2676,8 +3151,9 @@ def test_create_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2772,8 +3248,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2879,8 +3356,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3023,8 +3501,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3116,8 +3595,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3201,8 +3681,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3345,8 +3826,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3409,6 +3891,88 @@ def test_delete_instances_rest(request_type): "instances": ["instances_value1", "instances_value2"], "skip_instances_on_validation_error": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_delete_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
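All of these hunks feed the serialized message through a hand-built requests.Response: the test patches the transport's HTTP session, sets status_code, stuffs the JSON bytes into the private _content attribute, and lets the client parse it as though it came off the wire. A rough standalone approximation of that arrangement using only requests and unittest.mock (the session and patch target here are generic, not the generated transport object):

import json
from unittest import mock

import requests

# Build a fake Response the way the generated tests do.
response_value = requests.Response()
response_value.status_code = 200
response_value._content = json.dumps({"name": "operation-123"}).encode("UTF-8")

session = requests.Session()
with mock.patch.object(type(session), "request") as req:
    req.return_value = response_value
    resp = session.request("POST", "https://example.invalid/compute")
    assert resp.status_code == 200
    assert resp.json() == {"name": "operation-123"}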
@@ -3442,8 +4006,9 @@ def test_delete_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3550,8 +4115,9 @@ def test_delete_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3652,10 +4218,6 @@ def test_delete_instances_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_delete_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"], - "skip_instances_on_validation_error": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3702,8 +4264,9 @@ def test_delete_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3769,6 +4332,88 @@ def test_delete_instances_unary_rest(request_type): "instances": ["instances_value1", "instances_value2"], "skip_instances_on_validation_error": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.DeleteInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_delete_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3802,8 +4447,9 @@ def test_delete_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3888,8 +4534,9 @@ def test_delete_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3990,10 +4637,6 @@ def test_delete_instances_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_delete_instances_request_resource"] = { - "instances": ["instances_value1", "instances_value2"], - "skip_instances_on_validation_error": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4040,8 +4683,9 @@ def test_delete_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4106,6 +4750,90 @@ def test_delete_per_instance_configs_rest(request_type): request_init[ "region_instance_group_manager_delete_instance_config_req_resource" ] = {"names": ["names_value1", "names_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_delete_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4139,8 +4867,9 @@ def test_delete_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4245,8 +4974,9 @@ def test_delete_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4351,9 +5081,6 @@ def test_delete_per_instance_configs_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_manager_delete_instance_config_req_resource" - ] = {"names": ["names_value1", "names_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4400,8 +5127,9 @@ def test_delete_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4466,6 +5194,90 @@ def test_delete_per_instance_configs_unary_rest(request_type): request_init[ "region_instance_group_manager_delete_instance_config_req_resource" ] = {"names": ["names_value1", "names_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_delete_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4499,8 +5311,9 @@ def test_delete_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4583,8 +5396,9 @@ def test_delete_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4689,9 +5503,6 @@ def test_delete_per_instance_configs_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_manager_delete_instance_config_req_resource" - ] = {"names": ["names_value1", "names_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4738,8 +5549,9 @@ def test_delete_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4829,8 +5641,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4930,8 +5743,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5075,8 +5889,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroupManager.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5200,6 +6015,81 @@ def test_insert_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
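For the insert tests the pruned body (instance_group_manager_resource) is a deeply nested dict, and the reason pruning matters is the same one plain protobuf exhibits: dict/JSON parsing rejects keys the runtime schema lacks unless they are explicitly ignored. A tiny demonstration with a regular well-known protobuf message standing in for the compute resource (api_pb2.Method is just a convenient stand-in):

from google.protobuf import api_pb2, json_format

sample = {"name": "insert", "field_added_later": True}

try:
    json_format.ParseDict(sample, api_pb2.Method())
except json_format.ParseError as exc:
    # Unknown keys are an error by default, hence the pruning above.
    print("rejected:", exc)

runtime = {f.name for f in api_pb2.Method.DESCRIPTOR.fields}
pruned = {k: v for k, v in sample.items() if k in runtime}
print(json_format.ParseDict(pruned, api_pb2.Method()).name)  # insert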
@@ -5233,8 +6123,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5337,8 +6228,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5434,75 +6326,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5548,8 +6371,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5679,6 +6503,81 @@ def test_insert_unary_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5712,8 +6611,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5794,8 +6694,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5891,75 +6792,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6005,8 +6837,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6082,8 +6915,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6175,8 +7009,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6318,8 +7153,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6446,10 +7282,11 @@ def test_list_errors_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6542,10 +7379,11 @@ def test_list_errors_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6700,10 +7538,11 @@ def test_list_errors_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6836,10 +7675,11 @@ def test_list_managed_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6932,12 +7772,11 @@ def test_list_managed_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( - compute.RegionInstanceGroupManagersListInstancesResponse.pb( - return_value - ) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb( + return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7094,10 +7933,11 @@ def test_list_managed_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7230,10 +8070,11 @@ def test_list_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7326,12 +8167,13 @@ def test_list_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7490,10 +8332,11 @@ def test_list_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = 
Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7683,6 +8526,81 @@ def test_patch_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7716,8 +8634,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7824,8 +8743,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7925,75 +8845,6 @@ def test_patch_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8044,8 +8895,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8180,6 +9032,81 @@ def test_patch_unary_rest(request_type): ], "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionInstanceGroupManagerRequest.meta.fields[ + "instance_group_manager_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_group_manager_resource"][field]) + ): + del request_init["instance_group_manager_resource"][field][i][ + subfield + ] + else: + del request_init["instance_group_manager_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
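The deletion loops in the blocks above only ever touch one level of nesting: a repeated message field shows up in the sample dict as a list of dicts, a singular message field as a plain dict, and any subfield the runtime message does not know about is deleted in place. A simplified restatement on plain dicts (field names such as `brand_new_knob` are invented for illustration; the generated helper also inspects only the first list element, whereas this sketch prunes each element):

```python
def prune_unknown_subfields(sample, known_pairs):
    """Delete nested keys whose (field, subfield) pair is not in known_pairs."""
    for field, value in sample.items():
        if isinstance(value, list) and value and isinstance(value[0], dict):
            # Repeated message field: prune every element of the list.
            for element in value:
                for subfield in list(element):
                    if (field, subfield) not in known_pairs:
                        del element[subfield]
        elif isinstance(value, dict):
            # Singular message field: prune the dict in place.
            for subfield in list(value):
                if (field, subfield) not in known_pairs:
                    del value[subfield]
    return sample


request_init = {
    "update_policy": {"max_surge": {"fixed": 528}, "brand_new_knob": 1},
    "named_ports": [{"name": "name_value", "port": 453, "future_field": "x"}],
}
known = {("update_policy", "max_surge"), ("named_ports", "name"), ("named_ports", "port")}
print(prune_unknown_subfields(request_init, known))
# {'update_policy': {'max_surge': {'fixed': 528}},
#  'named_ports': [{'name': 'name_value', 'port': 453}]}
```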
@@ -8213,8 +9140,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8299,8 +9227,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8400,75 +9329,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["instance_group_manager_resource"] = { - "auto_healing_policies": [ - {"health_check": "health_check_value", "initial_delay_sec": 1778} - ], - "base_instance_name": "base_instance_name_value", - "creation_timestamp": "creation_timestamp_value", - "current_actions": { - "abandoning": 1041, - "creating": 845, - "creating_without_retries": 2589, - "deleting": 844, - "none": 432, - "recreating": 1060, - "refreshing": 1069, - "restarting": 1091, - "resuming": 874, - "starting": 876, - "stopping": 884, - "suspending": 1088, - "verifying": 979, - }, - "description": "description_value", - "distribution_policy": { - "target_shape": "target_shape_value", - "zones": [{"zone": "zone_value"}], - }, - "fingerprint": "fingerprint_value", - "id": 205, - "instance_group": "instance_group_value", - "instance_lifecycle_policy": { - "force_update_on_repair": "force_update_on_repair_value" - }, - "instance_template": "instance_template_value", - "kind": "kind_value", - "list_managed_instances_results": "list_managed_instances_results_value", - "name": "name_value", - "named_ports": [{"name": "name_value", "port": 453}], - "region": "region_value", - "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, - "status": { - "autoscaler": "autoscaler_value", - "is_stable": True, - "stateful": { - "has_stateful_config": True, - "per_instance_configs": {"all_effective": True}, - }, - "version_target": {"is_reached": True}, - }, - "target_pools": ["target_pools_value1", "target_pools_value2"], - "target_size": 1185, - "update_policy": { - "instance_redistribution_type": "instance_redistribution_type_value", - "max_surge": {"calculated": 1042, "fixed": 528, "percent": 753}, - "max_unavailable": {}, - "minimal_action": "minimal_action_value", - "most_disruptive_allowed_action": "most_disruptive_allowed_action_value", - "replacement_method": "replacement_method_value", - "type_": "type__value", - }, - "versions": [ - { - "instance_template": "instance_template_value", - "name": "name_value", - "target_size": {}, - } - ], - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
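The `*_bad_request` hunks above drop the hand-written resource body entirely; those tests only need the routing params, and the surrounding mock (not shown in these hunks) fakes a 400 response that the transport surfaces as a `BadRequest`. A hedged sketch of that status-to-exception mapping using `google.api_core.exceptions.from_http_response`, with an illustrative error payload and a bare `Request()` attached so the helper can read method/url; the real tests drive this through the client rather than calling the helper directly:

```python
from google.api_core import exceptions as core_exceptions
from requests import Request, Response

# Fake the 400 response the mocked session would hand back.
response_value = Response()
response_value.status_code = 400
response_value.request = Request()  # from_http_response formats method/url from this
response_value._content = b'{"error": {"message": "Invalid resource name"}}'

exc = core_exceptions.from_http_response(response_value)
print(type(exc).__name__)  # BadRequest
print(exc.message)         # includes "Invalid resource name"
```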
@@ -8519,8 +9379,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8596,6 +9457,90 @@ def test_patch_per_instance_configs_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_patch_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + 
"region_instance_group_manager_patch_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8629,8 +9574,9 @@ def test_patch_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8737,8 +9683,9 @@ def test_patch_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8843,16 +9790,6 @@ def test_patch_per_instance_configs_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_manager_patch_instance_config_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8901,8 +9838,9 @@ def test_patch_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8976,6 +9914,90 @@ def test_patch_per_instance_configs_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_patch_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9009,8 +10031,9 @@ def test_patch_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9095,8 +10118,9 @@ def test_patch_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9201,16 +10225,6 @@ def test_patch_per_instance_configs_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_manager_patch_instance_config_req_resource"] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9259,8 +10273,9 @@ def test_patch_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9327,6 +10342,88 @@ def test_recreate_instances_rest(request_type): request_init["region_instance_group_managers_recreate_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RecreateInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_recreate_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_recreate_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9360,8 +10457,9 @@ def test_recreate_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9468,8 +10566,9 @@ def test_recreate_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9570,9 +10669,6 @@ def test_recreate_instances_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_recreate_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9619,8 +10715,9 @@ def test_recreate_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9685,6 +10782,88 @@ def test_recreate_instances_unary_rest(request_type): request_init["region_instance_group_managers_recreate_request_resource"] = { "instances": ["instances_value1", "instances_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RecreateInstancesRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_recreate_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_recreate_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_recreate_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
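Every REST test in this file follows the same mocking shape: patch the HTTP session's `request` method, return the prepared `response_value`, then call the client method and inspect the parsed result. The `with mock.patch...` context sits outside these hunks, so the sketch below is a generic illustration of the idiom rather than the generated code verbatim; the client construction, the choice of `recreate_instances_unary`, and patching `requests.sessions.Session` (the class the transport's `AuthorizedSession` ultimately delegates to) are assumptions made to keep the example self-contained:

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1 as compute
from google.protobuf import json_format
from requests import Response
from requests.sessions import Session

client = compute.RegionInstanceGroupManagersClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

# Canned payload for the mocked HTTP layer.
return_value = compute.Operation.pb(compute.Operation(name="operation-1"))
response_value = Response()
response_value.status_code = 200
response_value._content = json_format.MessageToJson(return_value).encode("UTF-8")

# Intercept the underlying HTTP call and hand back the canned response.
with mock.patch.object(Session, "request") as req:
    req.return_value = response_value
    op = client.recreate_instances_unary(
        project="sample1",
        region="sample2",
        instance_group_manager="sample3",
        region_instance_group_managers_recreate_request_resource={
            "instances": ["instances_value1", "instances_value2"]
        },
    )

assert op.name == "operation-1"
```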
@@ -9718,8 +10897,9 @@ def test_recreate_instances_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9804,8 +10984,9 @@ def test_recreate_instances_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9906,9 +11087,6 @@ def test_recreate_instances_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_recreate_request_resource"] = { - "instances": ["instances_value1", "instances_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9955,8 +11133,9 @@ def test_recreate_instances_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10051,8 +11230,9 @@ def test_resize_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10170,8 +11350,9 @@ def test_resize_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10326,8 +11507,9 @@ def test_resize_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -10420,8 +11602,9 @@ def test_resize_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10517,8 +11700,9 @@ def test_resize_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10673,8 +11857,9 @@ def test_resize_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10737,6 +11922,90 @@ def test_set_instance_template_rest(request_type): request_init["region_instance_group_managers_set_template_request_resource"] = { "instance_template": "instance_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_set_template_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_set_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -10770,8 +12039,9 @@ def test_set_instance_template_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10878,8 +12148,9 @@ def test_set_instance_template_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10982,9 +12253,6 @@ def test_set_instance_template_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_set_template_request_resource"] = { - "instance_template": "instance_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11031,8 +12299,9 @@ def test_set_instance_template_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11097,6 +12366,90 @@ def test_set_instance_template_unary_rest(request_type): request_init["region_instance_group_managers_set_template_request_resource"] = { "instance_template": "instance_template_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_set_template_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_set_template_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_set_template_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -11130,8 +12483,9 @@ def test_set_instance_template_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11216,8 +12570,9 @@ def test_set_instance_template_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11320,9 +12675,6 @@ def test_set_instance_template_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_set_template_request_resource"] = { - "instance_template": "instance_template_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11369,8 +12721,9 @@ def test_set_instance_template_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11436,6 +12789,88 @@ def test_set_target_pools_rest(request_type): "fingerprint": "fingerprint_value", "target_pools": ["target_pools_value1", "target_pools_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetPoolsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_set_target_pools_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
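The `pb_return_value` to `return_value` rename that recurs in these hunks is purely cosmetic: the proto-plus value is converted to its underlying protobuf message and then serialized. A minimal sketch of those two steps, assuming google-cloud-compute and protobuf are installed:

```python
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# A proto-plus Operation, standing in for the tests' mocked return_value.
return_value = compute.Operation(name="operation-123")

# Convert return value to protobuf type, reusing the same variable name
# as the regenerated tests now do, then serialize it as the REST layer would.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
print(json_return_value)  # prints JSON containing "name": "operation-123"
```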
@@ -11469,8 +12904,9 @@ def test_set_target_pools_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11577,8 +13013,9 @@ def test_set_target_pools_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11679,10 +13116,6 @@ def test_set_target_pools_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_set_target_pools_request_resource"] = { - "fingerprint": "fingerprint_value", - "target_pools": ["target_pools_value1", "target_pools_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11729,8 +13162,9 @@ def test_set_target_pools_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11796,6 +13230,88 @@ def test_set_target_pools_unary_rest(request_type): "fingerprint": "fingerprint_value", "target_pools": ["target_pools_value1", "target_pools_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetTargetPoolsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_managers_set_target_pools_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
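Downstream of the serialization, every test wires the JSON into a canned `requests.Response` and patches the session call. A self-contained sketch of that mocking pattern (the URL is a placeholder, not the real endpoint):

```python
import json
from unittest import mock

from requests import Response, Session

json_return_value = json.dumps({"name": "operation-123"})

# Wrap the value into a proper Response obj, as the tests do.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")

# Patch the transport-level call and hand the canned response back.
with mock.patch.object(Session, "request", return_value=response_value) as req:
    resp = Session().request("POST", "https://example.invalid/set-target-pools")
    assert resp.status_code == 200
    assert resp.json()["name"] == "operation-123"
    req.assert_called_once()
```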
@@ -11829,8 +13345,9 @@ def test_set_target_pools_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11915,8 +13432,9 @@ def test_set_target_pools_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12017,10 +13535,6 @@ def test_set_target_pools_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init["region_instance_group_managers_set_target_pools_request_resource"] = { - "fingerprint": "fingerprint_value", - "target_pools": ["target_pools_value1", "target_pools_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12067,8 +13581,9 @@ def test_set_target_pools_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12142,6 +13657,90 @@ def test_update_per_instance_configs_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_update_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
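The deletion loop is easiest to follow on toy data shaped like the sample above; `legacy_flag` below is invented to stand in for a subfield the runtime proto no longer defines:

```python
# Allow-list as it would be derived from the runtime schema.
runtime_nested_fields = {
    ("per_instance_configs", "fingerprint"),
    ("per_instance_configs", "name"),
    ("per_instance_configs", "preserved_state"),
    ("per_instance_configs", "status"),
}

resource = {
    "per_instance_configs": [
        {
            "fingerprint": "fingerprint_value",
            "name": "name_value",
            "legacy_flag": True,  # invented; unknown to the "runtime" schema
        }
    ]
}

# Simplified version of the generated pruning: drop unknown subfields
# from every element of each repeated message field.
for field, value in resource.items():
    items = value if isinstance(value, list) else [value]
    for item in items:
        if isinstance(item, dict):
            for subfield in [s for s in item if (field, s) not in runtime_nested_fields]:
                del item[subfield]

print(resource)
# {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value'}]}
```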
@@ -12175,8 +13774,9 @@ def test_update_per_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12283,8 +13883,9 @@ def test_update_per_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12389,18 +13990,6 @@ def test_update_per_instance_configs_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_manager_update_instance_config_req_resource" - ] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12449,8 +14038,9 @@ def test_update_per_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12526,6 +14116,90 @@ def test_update_per_instance_configs_unary_rest(request_type): } ] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.meta.fields[ + "region_instance_group_manager_update_instance_config_req_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field] + ), + ): + del request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -12559,8 +14233,9 @@ def test_update_per_instance_configs_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12645,8 +14320,9 @@ def test_update_per_instance_configs_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12751,18 +14427,6 @@ def test_update_per_instance_configs_unary_rest_bad_request( "region": "sample2", "instance_group_manager": "sample3", } - request_init[ - "region_instance_group_manager_update_instance_config_req_resource" - ] = { - "per_instance_configs": [ - { - "fingerprint": "fingerprint_value", - "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, - "status": "status_value", - } - ] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12811,8 +14475,9 @@ def test_update_per_instance_configs_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/tests/unit/gapic/compute_v1/test_region_instance_groups.py index 2202131b..d5b5db6e 100644 --- a/tests/unit/gapic/compute_v1/test_region_instance_groups.py +++ b/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -628,8 +628,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -721,8 +722,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionInstanceGroupReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value 
@@ -866,8 +868,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -937,8 +940,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1030,8 +1034,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1173,8 +1178,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1291,6 +1297,88 @@ def test_list_instances_rest(request_type): "instance_state": "instance_state_value", "port_name": "port_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ListInstancesRegionInstanceGroupsRequest.meta.fields[ + "region_instance_groups_list_instances_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_groups_list_instances_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_groups_list_instances_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_groups_list_instances_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_groups_list_instances_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
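For this resource the sample only has scalar fields, so the allow-list logic is effectively a no-op; a small hedged sketch showing why, assuming google-cloud-compute is installed:

```python
from google.cloud.compute_v1.types import compute

resource_field = compute.ListInstancesRegionInstanceGroupsRequest.meta.fields[
    "region_instance_groups_list_instances_request_resource"
]
resource_fields = resource_field.message.meta.fields

# Neither `instance_state` nor `port_name` carries a nested message, so
# `get_message_fields` returns [] for them and nothing gets pruned here.
print([name for name, f in resource_fields.items() if not f.message])
# expected to list 'instance_state' and 'port_name'
```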
@@ -1306,8 +1394,9 @@ def test_list_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1404,8 +1493,9 @@ def test_list_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1516,10 +1606,6 @@ def test_list_instances_rest_bad_request( "region": "sample2", "instance_group": "sample3", } - request_init["region_instance_groups_list_instances_request_resource"] = { - "instance_state": "instance_state_value", - "port_name": "port_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1566,8 +1652,9 @@ def test_list_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1699,6 +1786,88 @@ def test_set_named_ports_rest(request_type): "fingerprint": "fingerprint_value", "named_ports": [{"name": "name_value", "port": 453}], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNamedPortsRegionInstanceGroupRequest.meta.fields[ + "region_instance_groups_set_named_ports_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_groups_set_named_ports_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
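Here, by contrast, `named_ports` is a repeated message field, so its subfields are exactly what lands in the (field, subfield) allow-list; a hedged sketch of that lookup:

```python
from google.cloud.compute_v1.types import compute

resource_field = compute.SetNamedPortsRegionInstanceGroupRequest.meta.fields[
    "region_instance_groups_set_named_ports_request_resource"
]
named_ports_field = resource_field.message.meta.fields["named_ports"]

# NamedPort is itself a message, so pairs such as ("named_ports", "name")
# and ("named_ports", "port") end up in runtime_nested_fields.
print([f.name for f in named_ports_field.message.meta.fields.values()])
```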
@@ -1732,8 +1901,9 @@ def test_set_named_ports_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1840,8 +2010,9 @@ def test_set_named_ports_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1942,10 +2113,6 @@ def test_set_named_ports_rest_bad_request( "region": "sample2", "instance_group": "sample3", } - request_init["region_instance_groups_set_named_ports_request_resource"] = { - "fingerprint": "fingerprint_value", - "named_ports": [{"name": "name_value", "port": 453}], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1992,8 +2159,9 @@ def test_set_named_ports_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2059,6 +2227,88 @@ def test_set_named_ports_unary_rest(request_type): "fingerprint": "fingerprint_value", "named_ports": [{"name": "name_value", "port": 453}], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetNamedPortsRegionInstanceGroupRequest.meta.fields[ + "region_instance_groups_set_named_ports_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_instance_groups_set_named_ports_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field] + ), + ): + del request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_instance_groups_set_named_ports_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2092,8 +2342,9 @@ def test_set_named_ports_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2178,8 +2429,9 @@ def test_set_named_ports_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2280,10 +2532,6 @@ def test_set_named_ports_unary_rest_bad_request( "region": "sample2", "instance_group": "sample3", } - request_init["region_instance_groups_set_named_ports_request_resource"] = { - "fingerprint": "fingerprint_value", - "named_ports": [{"name": "name_value", "port": 453}], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2330,8 +2578,9 @@ def test_set_named_ports_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/tests/unit/gapic/compute_v1/test_region_instance_templates.py index 40f9ddd4..b75b3187 100644 --- a/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -640,8 +640,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -747,8 +748,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -890,8 +892,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +986,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1216,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1290,8 +1296,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1381,8 +1388,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1526,8 +1534,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1767,6 +1776,79 @@ def 
test_insert_rest(request_type): ] }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionInstanceTemplateRequest.meta.fields[ + "instance_template_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_template_resource"][field]) + ): + del request_init["instance_template_resource"][field][i][subfield] + else: + del request_init["instance_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
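Because `instance_template_resource` is by far the largest sample dict, it is the case most likely to hit a schema skew between generation time and test time. A related, simplified check (not the generated loop verbatim): filter a sample dict down to the top-level fields the installed `compute.InstanceTemplate` actually defines; `retired_field` is hypothetical and used only for illustration.

```python
from google.cloud.compute_v1.types import compute

sample = {
    "name": "name_value",
    "description": "description_value",
    # Hypothetical field, not part of the API; shown only to trigger filtering.
    "retired_field": "retired_value",
}

# `meta.fields` is keyed by field name, so membership tests the runtime schema.
known = set(compute.InstanceTemplate.meta.fields)
filtered = {k: v for k, v in sample.items() if k in known}
print(sorted(filtered))  # expect ['description', 'name']
```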
@@ -1800,8 +1882,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1904,8 +1987,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2000,191 +2084,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["instance_template_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - 
"machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "region": "region_value", - "self_link": "self_link_value", - "source_instance": "source_instance_value", - "source_instance_params": { - "disk_configs": [ - { - "auto_delete": True, - "custom_image": "custom_image_value", - "device_name": "device_name_value", - "instantiate_from": "instantiate_from_value", - } - ] - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2226,8 +2125,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2469,6 +2369,79 @@ def test_insert_unary_rest(request_type): ] }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionInstanceTemplateRequest.meta.fields[ + "instance_template_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_template_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_template_resource"][field]) + ): + del request_init["instance_template_resource"][field][i][subfield] + else: + del request_init["instance_template_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
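The block added above guards against version skew between the protos used when the tests were generated and the ones installed at test time: it introspects the runtime request message, records every (field, subfield) pair it knows about, and deletes any subfield of the sample request that the runtime cannot accept. The same idea can be sketched in isolation on hypothetical data (all names below are illustrative, not taken from the library):

```python
# Standalone sketch of the pruning guard. "known_fields" stands in for the
# (field, subfield) pairs discovered from the runtime message's field metadata.
request_init = {
    "resource": {
        "disks": [{"boot": True, "brand_new_subfield": "x"}],      # repeated message field
        "scheduling": {"preemptible": True, "brand_new_knob": 1},  # singular message field
    }
}
known_fields = {("disks", "boot"), ("scheduling", "preemptible")}

for field, value in request_init["resource"].items():
    # For repeated fields, inspect the first element; for singular messages, the dict itself.
    sample = value[0] if isinstance(value, list) and value else value
    if not isinstance(sample, dict):
        continue
    for subfield in [s for s in sample if (field, s) not in known_fields]:
        if isinstance(value, list):
            for element in value:           # repeated: prune every element
                element.pop(subfield, None)
        else:
            value.pop(subfield, None)       # singular: prune the nested dict

assert request_init == {
    "resource": {"disks": [{"boot": True}], "scheduling": {"preemptible": True}}
}
```

The generated version does the same thing, just phrased as two passes (collect `subfields_not_in_runtime`, then delete) so the loop over the sample request never mutates what it is iterating.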
@@ -2502,8 +2475,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2584,8 +2558,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2680,191 +2655,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["instance_template_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - 
"labels": {}, - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "region": "region_value", - "self_link": "self_link_value", - "source_instance": "source_instance_value", - "source_instance_params": { - "disk_configs": [ - { - "auto_delete": True, - "custom_image": "custom_image_value", - "device_name": "device_name_value", - "instantiate_from": "instantiate_from_value", - } - ] - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2906,8 +2696,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2979,8 +2770,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3072,8 +2864,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3215,8 +3008,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.InstanceTemplateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_instances.py b/tests/unit/gapic/compute_v1/test_region_instances.py index ccec113a..3092e012 100644 --- a/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/tests/unit/gapic/compute_v1/test_region_instances.py @@ -759,6 +759,84 @@ def test_bulk_insert_rest(request_type): "per_instance_properties": {}, "source_instance_template": "source_instance_template_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertRegionInstanceRequest.meta.fields[ + "bulk_insert_instance_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_instance_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["bulk_insert_instance_resource_resource"][field]), + ): + del request_init["bulk_insert_instance_resource_resource"][field][ + i + ][subfield] + else: + del request_init["bulk_insert_instance_resource_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
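The `get_message_fields` helper repeated in these hunks tells proto-plus wrappers apart from raw `*_pb2` classes by whether the nested message type carries a `DESCRIPTOR`. The same introspection can be driven by hand to see which nested fields the installed library actually exposes; a sketch using the request type named in this hunk (the printed field names are illustrative):

```python
# Hand-driven version of the introspection used above: look up the field on the
# request type, then read the nested message's field names. Proto-plus classes
# keep them under .meta.fields; raw protobuf classes expose DESCRIPTOR.fields.
from google.cloud import compute_v1 as compute

field = compute.BulkInsertRegionInstanceRequest.meta.fields[
    "bulk_insert_instance_resource_resource"
]
if hasattr(field.message, "DESCRIPTOR"):        # raw protobuf message class
    nested_names = [f.name for f in field.message.DESCRIPTOR.fields]
else:                                           # proto-plus wrapper
    nested_names = list(field.message.meta.fields)

print(sorted(nested_names))  # e.g. ['count', 'instance_properties', 'min_count', ...]
```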
@@ -792,8 +870,9 @@ def test_bulk_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -896,8 +975,9 @@ def test_bulk_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -992,179 +1072,6 @@ def test_bulk_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["bulk_insert_instance_resource_resource"] = { - "count": 553, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - "labels": {}, - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": 
[{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "location_policy": {"locations": {}, "target_shape": "target_shape_value"}, - "min_count": 972, - "name_pattern": "name_pattern_value", - "per_instance_properties": {}, - "source_instance_template": "source_instance_template_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1206,8 +1113,9 @@ def test_bulk_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1437,6 +1345,84 @@ def test_bulk_insert_unary_rest(request_type): "per_instance_properties": {}, "source_instance_template": "source_instance_template_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.BulkInsertRegionInstanceRequest.meta.fields[ + "bulk_insert_instance_resource_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "bulk_insert_instance_resource_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len(request_init["bulk_insert_instance_resource_resource"][field]), + ): + del request_init["bulk_insert_instance_resource_resource"][field][ + i + ][subfield] + else: + del request_init["bulk_insert_instance_resource_resource"][field][ + subfield + ] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -1470,8 +1456,9 @@ def test_bulk_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1552,8 +1539,9 @@ def test_bulk_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1648,179 +1636,6 @@ def test_bulk_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["bulk_insert_instance_resource_resource"] = { - "count": 553, - "instance_properties": { - "advanced_machine_features": { - "enable_nested_virtualization": True, - "enable_uefi_networking": True, - "threads_per_core": 1689, - "visible_core_count": 1918, - }, - "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, - "description": "description_value", - "disks": [ - { - "architecture": "architecture_value", - "auto_delete": True, - "boot": True, - "device_name": "device_name_value", - "disk_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "disk_size_gb": 1261, - "force_attach": True, - "guest_os_features": [{"type_": "type__value"}], - "index": 536, - "initialize_params": { - "architecture": "architecture_value", - "description": "description_value", - "disk_name": "disk_name_value", - "disk_size_gb": 1261, - "disk_type": "disk_type_value", - "labels": {}, - "licenses": ["licenses_value1", "licenses_value2"], - "on_update_action": "on_update_action_value", - "provisioned_iops": 1740, - "provisioned_throughput": 2411, - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "source_image": "source_image_value", - "source_image_encryption_key": {}, - "source_snapshot": "source_snapshot_value", - "source_snapshot_encryption_key": {}, - }, - "interface": "interface_value", - "kind": "kind_value", - "licenses": ["licenses_value1", "licenses_value2"], - "mode": "mode_value", - "saved_state": "saved_state_value", - "shielded_instance_initial_state": { - "dbs": [ - {"content": "content_value", "file_type": "file_type_value"} - ], - "dbxs": {}, - "keks": {}, - "pk": {}, - }, - "source": "source_value", - "type_": "type__value", - } - ], - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "key_revocation_action_type": "key_revocation_action_type_value", - 
"labels": {}, - "machine_type": "machine_type_value", - "metadata": { - "fingerprint": "fingerprint_value", - "items": [{"key": "key_value", "value": "value_value"}], - "kind": "kind_value", - }, - "min_cpu_platform": "min_cpu_platform_value", - "network_interfaces": [ - { - "access_configs": [ - { - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "kind": "kind_value", - "name": "name_value", - "nat_i_p": "nat_i_p_value", - "network_tier": "network_tier_value", - "public_ptr_domain_name": "public_ptr_domain_name_value", - "set_public_ptr": True, - "type_": "type__value", - } - ], - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "fingerprint": "fingerprint_value", - "internal_ipv6_prefix_length": 2831, - "ipv6_access_configs": {}, - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_address": "ipv6_address_value", - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_attachment": "network_attachment_value", - "network_i_p": "network_i_p_value", - "nic_type": "nic_type_value", - "queue_count": 1197, - "stack_type": "stack_type_value", - "subnetwork": "subnetwork_value", - } - ], - "network_performance_config": { - "total_egress_bandwidth_tier": "total_egress_bandwidth_tier_value" - }, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "reservation_affinity": { - "consume_reservation_type": "consume_reservation_type_value", - "key": "key_value", - "values": ["values_value1", "values_value2"], - }, - "resource_manager_tags": {}, - "resource_policies": [ - "resource_policies_value1", - "resource_policies_value2", - ], - "scheduling": { - "automatic_restart": True, - "instance_termination_action": "instance_termination_action_value", - "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, - "location_hint": "location_hint_value", - "min_node_cpus": 1379, - "node_affinities": [ - { - "key": "key_value", - "operator": "operator_value", - "values": ["values_value1", "values_value2"], - } - ], - "on_host_maintenance": "on_host_maintenance_value", - "preemptible": True, - "provisioning_model": "provisioning_model_value", - }, - "service_accounts": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "shielded_instance_config": { - "enable_integrity_monitoring": True, - "enable_secure_boot": True, - "enable_vtpm": True, - }, - "tags": { - "fingerprint": "fingerprint_value", - "items": ["items_value1", "items_value2"], - }, - }, - "location_policy": {"locations": {}, "target_shape": "target_shape_value"}, - "min_count": 972, - "name_pattern": "name_pattern_value", - "per_instance_properties": {}, - "source_instance_template": "source_instance_template_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1862,8 +1677,9 @@ def test_bulk_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index 162efbe9..66b730fd 100644 --- a/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -646,8 +646,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -753,8 +754,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -897,8 +899,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -990,8 +993,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1075,8 +1079,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1219,8 +1224,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response 
obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1304,8 +1310,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1401,8 +1408,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1546,8 +1554,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1635,6 +1644,81 @@ def test_insert_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
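For context on why `MessageToJson` over the `.pb()` form is the right serialization for these mocks: the JSON placed into the fake `Response` is exactly what the REST client parses back into the return type. A round-trip sketch, assuming proto-plus's standard `pb()`/`wrap()` helpers and `json_format.Parse` (none of which are introduced by this diff), with illustrative field values:

```python
# Round-trip check: serialize the protobuf form to JSON, parse it back into the
# same protobuf type, and re-wrap it as a proto-plus message.
from google.cloud import compute_v1 as compute
from google.protobuf import json_format

original = compute.NetworkEndpointGroup(name="neg-1", default_port=8080)

pb = compute.NetworkEndpointGroup.pb(original)
json_payload = json_format.MessageToJson(pb)

parsed_pb = json_format.Parse(json_payload, type(pb)())
assert compute.NetworkEndpointGroup.wrap(parsed_pb) == original
```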
@@ -1668,8 +1752,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1772,8 +1857,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1869,39 +1955,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1943,8 +1996,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2034,6 +2088,81 @@ def test_insert_unary_rest(request_type): "subnetwork": "subnetwork_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2067,8 +2196,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2149,8 +2279,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2246,39 +2377,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
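The `request = request_type(**request_init)` calls that bracket these hunks work because proto-plus request constructors accept nested dicts and coerce them into the corresponding message types, which is why the sample requests can stay as plain dictionaries even after pruning. A small sketch with field values taken from the sample data above:

```python
# Proto-plus coerces nested dicts into typed messages at construction time.
from google.cloud import compute_v1 as compute

request = compute.InsertRegionNetworkEndpointGroupRequest(
    project="sample1",
    region="sample2",
    network_endpoint_group_resource={"name": "name_value", "default_port": 1289},
)

assert isinstance(request.network_endpoint_group_resource, compute.NetworkEndpointGroup)
assert request.network_endpoint_group_resource.default_port == 1289
```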
@@ -2320,8 +2418,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2393,8 +2492,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2486,8 +2586,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2629,8 +2730,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py b/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py index 366eedfa..86268d6a 100644 --- a/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py +++ b/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py @@ -622,6 +622,83 @@ def test_add_association_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
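The association payload used in these firewall-policy tests is small enough to show the typed alternative to the raw dict: the same data can be expressed with the proto-plus resource and request types named in this hunk (a sketch; values mirror the sample request above):

```python
# Typed equivalent of the sample firewall_policy_association_resource dict.
from google.cloud import compute_v1 as compute

association = compute.FirewallPolicyAssociation(
    attachment_target="attachment_target_value",
    display_name="display_name_value",
    name="name_value",
    short_name="short_name_value",
)

request = compute.AddAssociationRegionNetworkFirewallPolicyRequest(
    project="sample1",
    region="sample2",
    firewall_policy="sample3",
    firewall_policy_association_resource=association,
)
```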
@@ -655,8 +732,9 @@ def test_add_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -768,8 +846,9 @@ def test_add_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -875,13 +954,6 @@ def test_add_association_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -928,8 +1000,9 @@ def test_add_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -998,6 +1071,83 @@ def test_add_association_unary_rest(request_type): "name": "name_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddAssociationRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_association_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_association_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_association_resource"][field]) + ): + del request_init["firewall_policy_association_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_association_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
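For `FirewallPolicyAssociation` this guard is effectively a no-op: every value in the sample dict above is a plain string, so neither the repeated-field branch nor the nested-dict branch ever yields a `result` with `.keys()`, and `subfields_not_in_runtime` stays empty. The generator emits the same boilerplate for flat resources as for nested ones rather than special-casing them. A quick check against the installed library (illustrative; the expected output assumes current releases, where the association has no message-typed fields):

from google.cloud import compute_v1 as compute

assoc_field = compute.AddAssociationRegionNetworkFirewallPolicyRequest.meta.fields[
    "firewall_policy_association_resource"
]
# Message-typed subfields of the association, i.e. the only candidates for pruning.
nested = [
    f.name
    for f in assoc_field.message.meta.fields.values()
    if hasattr(f, "message") and f.message
]
print(nested)  # expected: []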
@@ -1031,8 +1181,9 @@ def test_add_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1122,8 +1273,9 @@ def test_add_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1229,13 +1381,6 @@ def test_add_association_unary_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_association_resource"] = { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1282,8 +1427,9 @@ def test_add_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1396,6 +1542,81 @@ def test_add_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
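What the deletion loop actually does is easiest to see on a cut-down copy of the rule sample. In the sketch below, `packet_mirroring` is an invented subfield standing in for something the installed proto does not know about; the real loop additionally walks every element of repeated message fields (the `is_repeated` branch), which this compact version omits:

request_init = {
    "firewall_policy_rule_resource": {
        "match": {"src_ip_ranges": ["src_ip_ranges_value1"], "packet_mirroring": True},
        "priority": 898,
    }
}
# The pairs the metadata scan would have produced from the (hypothetical) older library.
runtime_nested_fields = [("match", "src_ip_ranges")]

resource = request_init["firewall_policy_rule_resource"]
for field, value in resource.items():
    if isinstance(value, dict):
        for subfield in list(value):
            if (field, subfield) not in runtime_nested_fields:
                del value[subfield]

print(resource)
# {'match': {'src_ip_ranges': ['src_ip_ranges_value1']}, 'priority': 898}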
@@ -1429,8 +1650,9 @@ def test_add_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1543,8 +1765,9 @@ def test_add_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1651,57 +1874,6 @@ def test_add_rule_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
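The bad-request tests, by contrast, simply drop the resource body instead of pruning it: the mocked session returns a 400 regardless of the body, and keeping the hard-coded dict would reintroduce the runtime-version problem in tests that carry no pruning guard, which is presumably why only the path parameters required for URL transcoding remain. With the types module aliased as `compute`, as in these tests, the remaining setup amounts to:

request_init = {
    "project": "sample1",
    "region": "sample2",
    "firewall_policy": "sample3",
}
request = compute.AddRuleRegionNetworkFirewallPolicyRequest(**request_init)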
@@ -1748,8 +1920,9 @@ def test_add_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1862,6 +2035,81 @@ def test_add_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1895,8 +2143,9 @@ def test_add_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1987,8 +2236,9 @@ def test_add_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2095,57 +2345,6 @@ def test_add_rule_unary_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
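The other change repeated throughout this file, `pb_return_value` → `return_value`, is a rename plus a clarifying comment; the mechanics are unchanged. `json_format.MessageToJson` only accepts raw protobuf messages, so the proto-plus return value is first unwrapped with the `.pb()` classmethod and the JSON bytes are planted on a `requests.Response` for the mocked session to hand back. A self-contained sketch of that pattern (the Operation field values are arbitrary):

from google.cloud import compute_v1 as compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(
    name="operation-123", status=compute.Operation.Status.DONE
)

response_value = Response()
response_value.status_code = 200
# Convert the proto-plus wrapper to its underlying protobuf message first.
return_value_pb = compute.Operation.pb(return_value)
response_value._content = json_format.MessageToJson(return_value_pb).encode("UTF-8")

print(response_value._content.decode("UTF-8"))
# e.g. {"name": "operation-123", "status": "DONE"} (key order may vary)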
@@ -2192,8 +2391,9 @@ def test_add_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2288,8 +2488,9 @@ def test_clone_rules_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2400,8 +2601,9 @@ def test_clone_rules_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2549,8 +2751,9 @@ def test_clone_rules_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2642,8 +2845,9 @@ def test_clone_rules_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2732,8 +2936,9 @@ def test_clone_rules_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2881,8 +3086,9 @@ def test_clone_rules_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2974,8 +3180,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3081,8 +3288,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3225,8 +3433,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3318,8 +3527,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3403,8 +3613,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3547,8 +3758,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3631,8 +3843,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3727,8 +3940,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3872,8 +4086,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3948,8 +4163,9 @@ def test_get_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4038,8 +4254,9 @@ def test_get_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4184,8 +4401,9 @@ def test_get_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4252,12 +4470,13 @@ def test_get_effective_firewalls_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -4346,12 +4565,13 @@ def test_get_effective_firewalls_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4501,12 +4721,13 @@ def test_get_effective_firewalls_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4575,8 +4796,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4663,8 +4885,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4803,8 +5026,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4885,8 +5109,9 @@ def test_get_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4981,8 +5206,9 @@ def test_get_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5127,8 +5353,9 @@ def test_get_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5269,6 +5496,77 @@ def test_insert_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del 
request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5302,8 +5600,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5406,8 +5705,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5503,92 +5803,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - 
"target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5634,8 +5848,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5782,6 +5997,77 @@ def test_insert_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5815,8 +6101,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5897,8 +6184,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5994,92 +6282,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - 
"target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6125,8 +6327,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6201,8 +6404,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6293,8 +6497,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6437,8 +6642,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.FirewallPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6637,6 +6843,77 @@ def test_patch_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
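All of these guards start from the same proto-plus metadata hook: `RequestType.meta.fields` maps field names to descriptors whose `.message` attribute, for composite fields, is the nested proto-plus class. Inspecting it for the patch request shows what the scan above iterates over (illustrative; the exact field list depends on the installed google-cloud-compute release):

from google.cloud import compute_v1 as compute

field = compute.PatchRegionNetworkFirewallPolicyRequest.meta.fields[
    "firewall_policy_resource"
]
print(field.name)              # firewall_policy_resource
print(field.message.__name__)  # FirewallPolicy, the proto-plus class of the request body
print(sorted(field.message.meta.fields)[:3])
# e.g. ['associations', 'creation_timestamp', 'description']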
@@ -6670,8 +6947,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6778,8 +7056,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6880,92 +7159,6 @@ def test_patch_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7016,8 +7209,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7169,6 +7363,77 @@ def test_patch_unary_rest(request_type): "self_link_with_id": "self_link_with_id_value", "short_name": "short_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_policy_resource"][field])): + del request_init["firewall_policy_resource"][field][i][subfield] + else: + del request_init["firewall_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
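Because the generator inlines this guard into every test that builds a nested resource dict, the same ~70 lines recur for add_association, add_rule, insert, patch and their unary variants. A hand-maintained suite could hoist the logic into a single helper; the sketch below is a hypothetical consolidation, not something this change (or the generator) emits:

def prune_stale_subfields(request_init, resource_key, request_cls):
    """Drop subfields of request_init[resource_key] that the installed
    library's message definition does not declare."""

    def message_fields(field):
        # Composite fields expose their nested type on `.message`; scalars do not.
        if getattr(field, "message", None):
            if hasattr(field.message, "DESCRIPTOR"):   # raw protobuf type
                return field.message.DESCRIPTOR.fields
            return field.message.meta.fields.values()  # proto-plus type
        return []

    top_level = request_cls.meta.fields[resource_key]
    known = {
        (field.name, nested.name)
        for field in message_fields(top_level)
        for nested in message_fields(field)
    }
    for field_name, value in request_init[resource_key].items():
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            if isinstance(entry, dict):
                for subfield in list(entry):
                    if (field_name, subfield) not in known:
                        del entry[subfield]

# A test body could then reduce its setup to:
# prune_stale_subfields(
#     request_init, "firewall_policy_resource",
#     compute.PatchRegionNetworkFirewallPolicyRequest,
# )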
@@ -7202,8 +7467,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7288,8 +7554,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7362,119 +7629,33 @@ def test_patch_unary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() - - client.patch_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_patch_unary_rest_bad_request( - transport: str = "rest", - request_type=compute.PatchRegionNetworkFirewallPolicyRequest, -): - client = RegionNetworkFirewallPoliciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "project": "sample1", - "region": "sample2", - "firewall_policy": "sample3", - } - request_init["firewall_policy_resource"] = { - "associations": [ - { - "attachment_target": "attachment_target_value", - "display_name": "display_name_value", - "firewall_policy_id": "firewall_policy_id_value", - "name": "name_value", - "short_name": "short_name_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "display_name": "display_name_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "parent": "parent_value", - "region": "region_value", - "rule_tuple_count": 1737, - "rules": [ - { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": [ - "dest_ip_ranges_value1", - "dest_ip_ranges_value2", - ], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": [ - "src_region_codes_value1", - "src_region_codes_value2", - ], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - 
"src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": [ - "target_resources_value1", - "target_resources_value2", - ], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } - ], - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "short_name": "short_name_value", + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.PatchRegionNetworkFirewallPolicyRequest, +): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "firewall_policy": "sample3", } request = request_type(**request_init) @@ -7526,8 +7707,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7644,6 +7826,81 @@ def test_patch_rule_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -7677,8 +7934,9 @@ def test_patch_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7790,8 +8048,9 @@ def test_patch_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7897,57 +8156,6 @@ def test_patch_rule_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7994,8 +8202,9 @@ def test_patch_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8108,6 +8317,81 @@ def test_patch_rule_unary_rest(request_type): "target_service_accounts_value2", ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleRegionNetworkFirewallPolicyRequest.meta.fields[ + "firewall_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_policy_rule_resource"][field]) + ): + del request_init["firewall_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["firewall_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -8141,8 +8425,9 @@ def test_patch_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8232,8 +8517,9 @@ def test_patch_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8339,57 +8625,6 @@ def test_patch_rule_unary_rest_bad_request( "region": "sample2", "firewall_policy": "sample3", } - request_init["firewall_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "direction": "direction_value", - "disabled": True, - "enable_logging": True, - "kind": "kind_value", - "match": { - "dest_address_groups": [ - "dest_address_groups_value1", - "dest_address_groups_value2", - ], - "dest_fqdns": ["dest_fqdns_value1", "dest_fqdns_value2"], - "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], - "dest_region_codes": [ - "dest_region_codes_value1", - "dest_region_codes_value2", - ], - "dest_threat_intelligences": [ - "dest_threat_intelligences_value1", - "dest_threat_intelligences_value2", - ], - "layer4_configs": [ - { - "ip_protocol": "ip_protocol_value", - "ports": ["ports_value1", "ports_value2"], - } - ], - "src_address_groups": [ - "src_address_groups_value1", - "src_address_groups_value2", - ], - "src_fqdns": ["src_fqdns_value1", "src_fqdns_value2"], - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], - "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], - "src_secure_tags": [{"name": "name_value", "state": "state_value"}], - "src_threat_intelligences": [ - "src_threat_intelligences_value1", - "src_threat_intelligences_value2", - ], - }, - "priority": 898, - "rule_name": "rule_name_value", - "rule_tuple_count": 1737, - "target_resources": ["target_resources_value1", "target_resources_value2"], - "target_secure_tags": {}, - "target_service_accounts": [ - "target_service_accounts_value1", - "target_service_accounts_value2", - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8436,8 +8671,9 @@ def test_patch_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8532,8 +8768,9 @@ def test_remove_association_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8644,8 +8881,9 @@ def test_remove_association_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8795,8 +9033,9 @@ def test_remove_association_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8888,8 +9127,9 @@ def test_remove_association_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8978,8 +9218,9 @@ def test_remove_association_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9129,8 +9370,9 @@ def test_remove_association_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9222,8 +9464,9 @@ def test_remove_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9334,8 +9577,9 @@ def test_remove_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9483,8 +9727,9 @@ def test_remove_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9576,8 +9821,9 @@ def test_remove_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9666,8 +9912,9 @@ def test_remove_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9815,8 +10062,9 @@ def test_remove_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9948,6 +10196,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9962,8 +10285,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10049,8 +10373,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10147,83 +10472,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
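With the request body literal removed above, the bad-request tests drive transcoding from the path parameters alone. A sketch of the surrounding test shape, reconstructed from the comments visible in this diff; the mock target, exception class, and 400 status are assumptions based on the conventions of these generated tests rather than lines shown here.

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import Response
from requests.sessions import Session


def exercise_bad_request(client, request):
    # Fake a 400 from the transport and expect the client to surface BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.set_iam_policy(request)
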
@@ -10270,8 +10518,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10332,6 +10581,83 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest.meta.fields[ + "test_permissions_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -10344,8 +10670,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10429,8 +10756,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10531,9 +10859,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10580,8 +10905,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py b/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py index 18e92f4a..cca56e9e 100644 --- a/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py +++ b/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -646,8 +646,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -753,8 +754,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -897,8 +899,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -990,8 +993,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1075,8 +1079,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1219,8 +1224,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1297,8 +1303,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpoint.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1387,8 +1394,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpoint.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1532,8 +1540,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpoint.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1604,6 +1613,81 @@ def test_insert_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNotificationEndpointRequest.meta.fields[ + "notification_endpoint_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "notification_endpoint_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["notification_endpoint_resource"][field]) + ): + del request_init["notification_endpoint_resource"][field][i][ + subfield + ] + else: + del request_init["notification_endpoint_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1637,8 +1721,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1741,8 +1826,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1838,22 +1924,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["notification_endpoint_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_settings": { - "authority": "authority_value", - "endpoint": "endpoint_value", - "payload_name": "payload_name_value", - "resend_interval": {"nanos": 543, "seconds": 751}, - "retry_duration_sec": 1941, - }, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1895,8 +1965,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1969,6 +2040,81 @@ def test_insert_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNotificationEndpointRequest.meta.fields[ + "notification_endpoint_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "notification_endpoint_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["notification_endpoint_resource"][field]) + ): + del request_init["notification_endpoint_resource"][field][i][ + subfield + ] + else: + del request_init["notification_endpoint_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
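The notification-endpoint insert tests above repeat the same pruning block for `notification_endpoint_resource`. With a helper along the lines of the `prune_absent_subfields` sketch earlier in this section, each occurrence would reduce to a single hypothetical call:

# Hypothetical call, reusing the earlier sketch; not part of the diff.
prune_absent_subfields(
    compute.InsertRegionNotificationEndpointRequest,
    request_init,
    "notification_endpoint_resource",
)
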
@@ -2002,8 +2148,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2084,8 +2231,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2181,22 +2329,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["notification_endpoint_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "grpc_settings": { - "authority": "authority_value", - "endpoint": "endpoint_value", - "payload_name": "payload_name_value", - "resend_interval": {"nanos": 543, "seconds": 751}, - "retry_duration_sec": 1941, - }, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2238,8 +2370,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2311,8 +2444,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpointList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2404,8 +2538,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpointList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2547,8 +2682,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.NotificationEndpointList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_operations.py b/tests/unit/gapic/compute_v1/test_region_operations.py index 17577fd8..aba64499 100644 --- a/tests/unit/gapic/compute_v1/test_region_operations.py +++ b/tests/unit/gapic/compute_v1/test_region_operations.py @@ -596,8 +596,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -677,8 +678,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRegionOperationR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -818,8 +820,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -907,8 +910,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1010,8 +1014,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionOperationRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1149,8 +1154,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1220,8 +1226,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1311,8 +1318,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionOperationsRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1454,8 +1462,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.OperationList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1597,8 +1606,9 @@ def test_wait_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1700,8 +1710,9 @@ def test_wait_rest_required_fields(request_type=compute.WaitRegionOperationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1839,8 +1850,9 @@ def test_wait_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_security_policies.py b/tests/unit/gapic/compute_v1/test_region_security_policies.py index fafb145b..9477af68 100644 --- a/tests/unit/gapic/compute_v1/test_region_security_policies.py +++ b/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -640,8 +640,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -747,8 +748,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -890,8 +892,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +986,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1216,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1292,8 +1298,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1383,8 +1390,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1528,8 +1536,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1681,6 +1690,77 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test 
runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
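The field-pruning block the generator now injects above boils down to a small idea: given a dict-shaped sample request and the set of (field, subfield) pairs the runtime dependency actually defines, delete any unknown nested key before the request type is constructed. The following standalone sketch restates that logic under those assumptions; the function name and sample data are illustrative and are not part of the generated tests (see googleapis/gapic-generator-python#1748).

# Hypothetical, standalone distillation of the pruning loop above; not generated code.
from typing import Any, Dict, Set, Tuple

def prune_unknown_subfields(
    resource: Dict[str, Any],
    runtime_nested_fields: Set[Tuple[str, str]],
) -> Dict[str, Any]:
    """Drop nested keys that the runtime version of the dependency does not define."""
    for field, value in resource.items():
        # Repeated message fields: use the first element as the key template.
        if isinstance(value, list) and value and isinstance(value[0], dict):
            unknown = [k for k in value[0] if (field, k) not in runtime_nested_fields]
            for item in value:
                for subfield in unknown:
                    item.pop(subfield, None)
        # Singular message fields.
        elif isinstance(value, dict):
            unknown = [k for k in value if (field, k) not in runtime_nested_fields]
            for subfield in unknown:
                value.pop(subfield, None)
    return resource

# "expr" is assumed to exist at runtime; the made-up "future_field" gets dropped.
sample = {"match": {"expr": {"title": "t"}, "future_field": 1}, "priority": 898}
prune_unknown_subfields(sample, {("match", "expr")})
assert sample == {"match": {"expr": {"title": "t"}}, "priority": 898}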
@@ -1714,8 +1794,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1823,8 +1904,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1924,103 +2006,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - 
"rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2066,8 +2051,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2225,6 +2211,77 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del 
request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2258,8 +2315,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2345,8 +2403,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2446,103 +2505,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - 
"enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2588,8 +2550,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2664,8 +2627,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2756,8 +2720,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2899,8 +2864,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3110,6 +3076,77 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
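The other change repeated through these hunks, dropping the intermediate pb_return_value and reusing return_value for the protobuf conversion, follows the response-mocking pattern sketched below. This is a condensed illustration, not generated code; it assumes google-cloud-compute and requests are importable, and the helper name is made up.

# Illustrative condensation of the mocked-REST-response pattern used in these tests.
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

def make_fake_operation_response() -> Response:
    return_value = compute.Operation(name="operation-123")

    # Wrap the value into a proper Response obj.
    response_value = Response()
    response_value.status_code = 200
    # Convert the proto-plus wrapper to its underlying protobuf message before
    # serializing; json_format.MessageToJson() expects a protobuf message.
    return_value = compute.Operation.pb(return_value)
    response_value._content = json_format.MessageToJson(return_value).encode("UTF-8")
    return response_value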
@@ -3143,8 +3180,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3251,8 +3289,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3352,103 +3391,6 @@ def test_patch_rest_bad_request( "region": "sample2", "security_policy": "sample3", } - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - 
], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3499,8 +3441,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3663,6 +3606,77 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del 
request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3696,8 +3710,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3782,8 +3797,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3883,103 +3899,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "security_policy": "sample3", } - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - 
"exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4030,8 +3949,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py index 4e1993ae..f2821b78 100644 --- a/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py +++ b/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -638,8 +638,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -745,8 +746,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -888,8 +890,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -981,8 +984,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1066,8 +1070,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # 
Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1209,8 +1214,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1292,8 +1298,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1385,8 +1392,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSslCertificateRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,8 +1538,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1612,6 +1621,77 @@ def test_insert_rest(request_type): ], "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSslCertificateRequest.meta.fields[ + "ssl_certificate_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_certificate_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_certificate_resource"][field])): + del request_init["ssl_certificate_resource"][field][i][subfield] + else: + del request_init["ssl_certificate_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
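The get_message_fields() helper repeated in each of these blocks hinges on a single check: a generated *_pb2 class carries a DESCRIPTOR, while a proto-plus wrapper exposes its schema through .meta.fields instead. A minimal illustration of that check follows; the helper name is an assumption, not part of the library.

# Minimal illustration of the proto-plus vs. raw-protobuf check; helper name is assumed.
from google.cloud.compute_v1.types import compute
from google.protobuf import timestamp_pb2

def nested_field_names(message_cls):
    if hasattr(message_cls, "DESCRIPTOR"):      # generated *_pb2 message class
        return [f.name for f in message_cls.DESCRIPTOR.fields]
    return list(message_cls.meta.fields)        # proto-plus message class

print(nested_field_names(compute.SslCertificateManagedSslCertificate))  # proto-plus type
print(nested_field_names(timestamp_pb2.Timestamp))                      # ['seconds', 'nanos']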
@@ -1645,8 +1725,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1749,8 +1830,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1845,32 +1927,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["ssl_certificate_resource"] = { - "certificate": "certificate_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "expire_time": "expire_time_value", - "id": 205, - "kind": "kind_value", - "managed": { - "domain_status": {}, - "domains": ["domains_value1", "domains_value2"], - "status": "status_value", - }, - "name": "name_value", - "private_key": "private_key_value", - "region": "region_value", - "self_link": "self_link_value", - "self_managed": { - "certificate": "certificate_value", - "private_key": "private_key_value", - }, - "subject_alternative_names": [ - "subject_alternative_names_value1", - "subject_alternative_names_value2", - ], - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1912,8 +1968,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1996,6 +2053,77 @@ def test_insert_unary_rest(request_type): ], "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSslCertificateRequest.meta.fields[ + "ssl_certificate_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_certificate_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_certificate_resource"][field])): + del request_init["ssl_certificate_resource"][field][i][subfield] + else: + del request_init["ssl_certificate_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2029,8 +2157,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2111,8 +2240,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2207,32 +2337,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["ssl_certificate_resource"] = { - "certificate": "certificate_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "expire_time": "expire_time_value", - "id": 205, - "kind": "kind_value", - "managed": { - "domain_status": {}, - "domains": ["domains_value1", "domains_value2"], - "status": "status_value", - }, - "name": "name_value", - "private_key": "private_key_value", - "region": "region_value", - "self_link": "self_link_value", - "self_managed": { - "certificate": "certificate_value", - "private_key": "private_key_value", - }, - "subject_alternative_names": [ - "subject_alternative_names_value1", - "subject_alternative_names_value2", - ], - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
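The *_rest_bad_request tests that these hunks slim down keep only the routing fields and then fake a 400 from the HTTP session. A pared-down sketch of that shape is below; the client, request type, and credentials are chosen for illustration and are not a verbatim copy of the generated test.

# Illustrative shape of a *_rest_bad_request test; names chosen for the example only.
from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from requests import Request, Response
from requests.sessions import Session

def test_insert_rest_bad_request_sketch():
    client = compute_v1.RegionSslCertificatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {"project": "sample1", "region": "sample2"}

    # Fake a 400 from the underlying requests session; the REST transport is
    # expected to surface it as google.api_core.exceptions.BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        # The error translation reads response.request, so attach a bare Request.
        response_value.request = Request()
        req.return_value = response_value
        client.insert(compute_v1.InsertRegionSslCertificateRequest(**request_init))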
@@ -2274,8 +2378,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2347,8 +2452,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2440,8 +2546,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2583,8 +2690,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_ssl_policies.py b/tests/unit/gapic/compute_v1/test_region_ssl_policies.py index 358fa711..9b771eac 100644 --- a/tests/unit/gapic/compute_v1/test_region_ssl_policies.py +++ b/tests/unit/gapic/compute_v1/test_region_ssl_policies.py @@ -626,8 +626,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -731,8 +732,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRegionSslPolicyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -870,8 +872,9 @@ def test_delete_rest_flattened(): # Wrap the 
value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -959,8 +962,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1044,8 +1048,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1183,8 +1188,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1262,8 +1268,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1355,8 +1362,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSslPolicyRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1494,8 +1502,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1571,6 +1580,73 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
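Aside (not part of the diff): the block added above decides at test time which nested fields the installed dependency actually defines. A minimal standalone sketch of that helper, assuming only what the generated code itself relies on, namely that proto-plus message classes expose `.meta.fields` while raw protobuf classes expose `.DESCRIPTOR.fields`:

def get_message_fields(field):
    # Return the nested fields of a message-typed field, or an empty list
    # for scalar fields, exactly as the generated helper above does.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            # proto-plus wrapper type
            return list(field.message.meta.fields.values())
        # plain protobuf type
        return list(field.message.DESCRIPTOR.fields)
    return []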
@@ -1604,8 +1680,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1706,8 +1783,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertRegionSslPolicyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1802,27 +1880,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1864,8 +1921,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1943,6 +2001,73 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1976,8 +2101,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2058,8 +2184,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2154,27 +2281,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
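Aside: the recurring change in these hunks is purely a rename; instead of binding the protobuf form to a separate pb_return_value, the tests rebind return_value before serializing. A small sketch of that round trip, assuming the google-cloud-compute and protobuf packages are installed:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Build a proto-plus message, convert it to its underlying protobuf type,
# and serialize it to JSON for the mocked HTTP response body.
return_value = compute.Operation(name="operation-1")
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
body = json_return_value.encode("UTF-8")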
@@ -2216,8 +2322,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2289,8 +2396,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2380,8 +2488,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionSslPoliciesReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2523,8 +2632,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2645,10 +2755,9 @@ def test_list_available_features_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2737,10 +2846,11 @@ def test_list_available_features_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2885,10 +2995,9 @@ def test_list_available_features_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.SslPoliciesListAvailableFeaturesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2963,6 +3072,73 @@ def test_patch_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
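Aside: the collect-then-delete loop inserted above can be hard to follow inside a diff. The following self-contained sketch shows the same pruning idea as a simplified single-pass variant over plain dicts, with a hypothetical hard-coded list standing in for runtime_nested_fields:

# Hypothetical sample body and runtime field list, for illustration only.
request_init = {
    "ssl_policy_resource": {
        "name": "name_value",
        "warnings": [{"code": "code_value", "not_in_runtime": "x"}],
    }
}
runtime_nested_fields = [("warnings", "code")]

for field, value in request_init["ssl_policy_resource"].items():
    is_repeated = isinstance(value, list) and bool(value)
    result = value[0] if is_repeated else (value if isinstance(value, dict) else None)
    if result and hasattr(result, "keys"):
        for subfield in list(result.keys()):
            if (field, subfield) not in runtime_nested_fields:
                # Drop the unknown subfield from every repeated element,
                # or from the single nested dict.
                targets = request_init["ssl_policy_resource"][field] if is_repeated else [result]
                for item in targets:
                    item.pop(subfield, None)

print(request_init)
# {'ssl_policy_resource': {'name': 'name_value', 'warnings': [{'code': 'code_value'}]}}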
@@ -2996,8 +3172,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3102,8 +3279,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchRegionSslPolicyReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3199,27 +3377,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "ssl_policy": "sample3"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3266,8 +3423,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3346,6 +3504,73 @@ def test_patch_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
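Aside: the matching *_rest_bad_request hunks simply delete the sample resource body instead of pruning it, since those tests only need a request that satisfies URL transcoding. A sketch of the minimal request they now build, assuming google-cloud-compute is installed:

from google.cloud.compute_v1.types import compute

# Path parameters alone are enough to satisfy transcoding for the bad-request case.
request_init = {"project": "sample1", "region": "sample2", "ssl_policy": "sample3"}
request = compute.PatchRegionSslPolicyRequest(**request_init)
print(request.project, request.region, request.ssl_policy)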
@@ -3379,8 +3604,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3465,8 +3691,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3562,27 +3789,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "ssl_policy": "sample3"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3629,8 +3835,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py index 6e0493d9..7c4c3199 100644 --- a/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py +++ b/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -640,8 +640,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -747,8 +748,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -890,8 +892,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +986,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1216,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = 
Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1293,8 +1299,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1385,8 +1392,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionTargetHttpProxyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,8 +1538,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1599,6 +1608,79 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
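Aside: for the target_http_proxy_resource field handled above, the (field, subfield) pairs known at runtime come from a nested comprehension. A sketch of that step in isolation, assuming google-cloud-compute is installed and reusing the same helper the diff defines:

from google.cloud.compute_v1.types import compute

def get_message_fields(field):
    # Same helper as in the generated tests: list nested fields for
    # proto-plus or protobuf message types, else return an empty list.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            return list(field.message.meta.fields.values())
        return list(field.message.DESCRIPTOR.fields)
    return []

test_field = compute.InsertRegionTargetHttpProxyRequest.meta.fields[
    "target_http_proxy_resource"
]
runtime_nested_fields = [
    (field.name, nested_field.name)
    for field in get_message_fields(test_field)
    for nested_field in get_message_fields(field)
]
print(runtime_nested_fields[:5])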
@@ -1632,8 +1714,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1736,8 +1819,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1832,19 +1916,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1886,8 +1957,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1957,6 +2029,79 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1990,8 +2135,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2072,8 +2218,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2168,19 +2315,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2222,8 +2356,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2295,8 +2430,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2388,8 +2524,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2531,8 +2668,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyList.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2646,6 +2784,79 @@ def test_set_url_map_rest(request_type): "target_http_proxy": "sample3", } request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapRegionTargetHttpProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
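Aside: every one of these hunks feeds the serialized JSON into a hand-built requests.Response, the pattern the tests repeat around each mocked call. A small sketch of just that pattern with a hypothetical payload, assuming the requests package is available; setting the private _content attribute mirrors what the generated tests themselves do:

from requests import Response

json_return_value = '{"urlMap": "url_map_value"}'  # hypothetical payload

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")

print(response_value.status_code, response_value.json())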
@@ -2679,8 +2890,9 @@ def test_set_url_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2787,8 +2999,9 @@ def test_set_url_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2888,7 +3101,6 @@ def test_set_url_map_rest_bad_request( "region": "sample2", "target_http_proxy": "sample3", } - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2933,8 +3145,9 @@ def test_set_url_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2995,6 +3208,79 @@ def test_set_url_map_unary_rest(request_type): "target_http_proxy": "sample3", } request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapRegionTargetHttpProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
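Aside: for a single-scalar message like the url_map_reference_resource body above, the pruning block is effectively a no-op, because a plain string value is neither a list nor a dict, so nothing is ever collected. A quick self-contained check of that observation:

request_init = {"url_map_reference_resource": {"url_map": "url_map_value"}}
subfields_not_in_runtime = []

for field, value in request_init["url_map_reference_resource"].items():
    result = value[0] if isinstance(value, list) and value else (
        value if isinstance(value, dict) else None
    )
    if result and hasattr(result, "keys"):
        subfields_not_in_runtime.append(field)

print(subfields_not_in_runtime)  # [] -> request_init is left untouched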
@@ -3028,8 +3314,9 @@ def test_set_url_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3114,8 +3401,9 @@ def test_set_url_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3215,7 +3503,6 @@ def test_set_url_map_unary_rest_bad_request( "region": "sample2", "target_http_proxy": "sample3", } - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3260,8 +3547,9 @@ def test_set_url_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py index a0fb3e89..32123dc9 100644 --- a/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py +++ b/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -644,8 +644,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -751,8 +752,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -894,8 +896,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf 
type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -987,8 +990,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1072,8 +1076,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1215,8 +1220,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1303,8 +1309,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1403,8 +1410,9 @@ def test_get_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1548,8 +1556,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1623,6 +1632,79 @@ def test_insert_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime 
may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
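The hunks above drop the intermediate pb_return_value binding and convert the proto-plus return value in place before JSON serialization. A minimal sketch of that conversion outside the test harness; the Operation field value is an illustrative assumption, not taken from the tests:

# Sketch only: serialize a proto-plus compute message to JSON the way the
# rewritten test hunks do. The field value is illustrative.
from google.cloud import compute_v1 as compute
from google.protobuf import json_format

return_value = compute.Operation(name="operation-1")   # proto-plus wrapper

# .pb() returns the underlying protobuf message, which is what
# json_format.MessageToJson expects.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

# The tests then place the encoded payload on a fake requests.Response.
content = json_return_value.encode("UTF-8")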
@@ -1656,8 +1738,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1760,8 +1843,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1856,25 +1940,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1916,8 +1981,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1993,6 +2059,79 @@ def test_insert_unary_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
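The added block above (repeated for each mutating method in this file) prunes the hand-written sample request so it only keeps fields that the installed protobuf definition still declares, per gapic-generator-python issue 1748. A compact worked example of the same pruning logic, with hypothetical field names and a hand-built runtime_nested_fields list standing in for the introspected one:

# Sketch only: the pruning loop from the generated tests, applied to a
# hypothetical sample request. "stale_subfield" plays the role of a field
# present in the sample dict but absent from the installed protobuf definition.
request_init = {
    "resource": {
        "outer_message": {"known": 1, "stale_subfield": 2},
        "repeated_message": [{"stale_subfield": 3}],
    }
}
# (field, subfield) pairs present in the runtime message definition.
runtime_nested_fields = [("outer_message", "known")]

subfields_not_in_runtime = []
for field, value in request_init["resource"].items():
    result, is_repeated = None, False
    if isinstance(value, list) and len(value):   # repeated message field
        is_repeated, result = True, value[0]
    if isinstance(value, dict):                  # singular message field
        result = value
    if result and hasattr(result, "keys"):
        for subfield in result.keys():
            if (field, subfield) not in runtime_nested_fields:
                subfields_not_in_runtime.append(
                    {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                )

for item in subfields_not_in_runtime:
    field, subfield = item["field"], item["subfield"]
    if item["is_repeated"]:
        for i in range(len(request_init["resource"][field])):
            del request_init["resource"][field][i][subfield]
    else:
        del request_init["resource"][field][subfield]

# request_init is now:
# {"resource": {"outer_message": {"known": 1}, "repeated_message": [{}]}}

After pruning, request_type(**request_init) can be constructed even when the runtime dependency predates the newest fields in the sample dict.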
@@ -2026,8 +2165,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2108,8 +2248,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2204,25 +2345,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2264,8 +2386,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2337,8 +2460,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2430,8 +2554,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2573,8 +2698,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2706,6 +2832,79 @@ def test_patch_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2739,8 +2938,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2847,8 +3047,9 @@ def test_patch_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2948,25 +3149,6 @@ def test_patch_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3013,8 +3195,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3095,6 +3278,79 @@ def test_patch_unary_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3128,8 +3384,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3214,8 +3471,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3315,25 +3573,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3380,8 +3619,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3446,6 +3686,88 @@ def test_set_ssl_certificates_rest(request_type): request_init[ "region_target_https_proxies_set_ssl_certificates_request_resource" ] = {"ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesRegionTargetHttpsProxyRequest.meta.fields[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3479,8 +3801,9 @@ def test_set_ssl_certificates_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3587,8 +3910,9 @@ def test_set_ssl_certificates_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3689,9 +4013,6 @@ def test_set_ssl_certificates_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init[ - "region_target_https_proxies_set_ssl_certificates_request_resource" - ] = {"ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3738,8 +4059,9 @@ def test_set_ssl_certificates_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3804,6 +4126,88 @@ def test_set_ssl_certificates_unary_rest(request_type): request_init[ "region_target_https_proxies_set_ssl_certificates_request_resource" ] = {"ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesRegionTargetHttpsProxyRequest.meta.fields[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3837,8 +4241,9 @@ def test_set_ssl_certificates_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3923,8 +4328,9 @@ def test_set_ssl_certificates_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4025,9 +4431,6 @@ def test_set_ssl_certificates_unary_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init[ - "region_target_https_proxies_set_ssl_certificates_request_resource" - ] = {"ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"]} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4074,8 +4477,9 @@ def test_set_ssl_certificates_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4138,6 +4542,79 @@ def test_set_url_map_rest(request_type): "target_https_proxy": "sample3", } request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapRegionTargetHttpsProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
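Each of these blocks defines the same get_message_fields helper, which branches on whether a nested message class is a proto-plus wrapper or a raw pb2 class. A small sketch of that check in isolation, using UrlMapReference (the type behind url_map_reference_resource in the hunk above); the printed field list is an assumption based on the sample dict:

# Sketch only: how the generated helper tells proto-plus classes from raw
# protobuf (pb2) classes when enumerating message fields.
from google.cloud import compute_v1 as compute

def field_names(message_cls):
    # proto-plus classes have no DESCRIPTOR attribute; their field metadata
    # lives on .meta.fields. Raw pb2 classes expose DESCRIPTOR.fields.
    if not hasattr(message_cls, "DESCRIPTOR"):
        return list(message_cls.meta.fields.keys())
    return [f.name for f in message_cls.DESCRIPTOR.fields]

proto_plus_cls = compute.UrlMapReference
pb2_cls = type(compute.UrlMapReference.pb(compute.UrlMapReference()))

print(field_names(proto_plus_cls))  # e.g. ['url_map']
print(field_names(pb2_cls))         # e.g. ['url_map']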
@@ -4171,8 +4648,9 @@ def test_set_url_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4279,8 +4757,9 @@ def test_set_url_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4380,7 +4859,6 @@ def test_set_url_map_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4425,8 +4903,9 @@ def test_set_url_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4487,6 +4966,79 @@ def test_set_url_map_unary_rest(request_type): "target_https_proxy": "sample3", } request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapRegionTargetHttpsProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4520,8 +5072,9 @@ def test_set_url_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4606,8 +5159,9 @@ def test_set_url_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4707,7 +5261,6 @@ def test_set_url_map_unary_rest_bad_request( "region": "sample2", "target_https_proxy": "sample3", } - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4752,8 +5305,9 @@ def test_set_url_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py b/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py index 35e8b75d..ab1cd5e1 100644 --- a/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py +++ b/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py @@ -640,8 +640,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -747,8 +748,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -890,8 +892,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +986,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1216,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1292,8 +1298,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1383,8 +1390,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionTargetTcpProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1528,8 +1536,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1596,6 +1605,79 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "service": "service_value", } + # The version of a generated 
dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_tcp_proxy_resource"][field]) + ): + del request_init["target_tcp_proxy_resource"][field][i][subfield] + else: + del request_init["target_tcp_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
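The unchanged context lines around each conversion show how these REST tests fake a successful HTTP call: a requests.Response with status 200 whose private _content holds the encoded JSON. A standalone sketch under the assumption that req stands in for the patched request call (here it is just a bare Mock carrying a trivial payload):

# Sketch only: faking a successful REST response the way these tests do.
from unittest import mock
from requests import Response

json_return_value = "{}"  # illustrative; the tests serialize a compute message here

req = mock.Mock()  # stands in for the mocked HTTP request call

response_value = Response()
response_value.status_code = 200
# Setting the private _content attribute is what makes response_value.content
# return the canned payload.
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value

assert req().content == b"{}"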
@@ -1629,8 +1711,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1733,8 +1816,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1829,18 +1913,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_tcp_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "proxy_header": "proxy_header_value", - "region": "region_value", - "self_link": "self_link_value", - "service": "service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1882,8 +1954,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1952,6 +2025,79 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "service": "service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_tcp_proxy_resource"][field]) + ): + del request_init["target_tcp_proxy_resource"][field][i][subfield] + else: + del request_init["target_tcp_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1985,8 +2131,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2067,8 +2214,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2163,18 +2311,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_tcp_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "proxy_header": "proxy_header_value", - "region": "region_value", - "self_link": "self_link_value", - "service": "service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2216,8 +2352,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2289,8 +2426,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2382,8 +2520,9 @@ def test_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2525,8 +2664,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_region_url_maps.py b/tests/unit/gapic/compute_v1/test_region_url_maps.py index 614b7ef5..6a30d297 100644 --- a/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -606,8 +606,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -711,8 +712,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRegionUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -850,8 +852,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -939,8 +942,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1024,8 +1028,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1163,8 +1168,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1239,8 +1245,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1329,8 +1336,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1466,8 +1474,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1693,6 +1702,73 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
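Editor's note: every hunk that replaces `pb_return_value` with `return_value` is the same mechanical change: the proto-plus wrapper is converted to its underlying protobuf message before JSON serialization, and the intermediate name is dropped. A minimal sketch of that mock-response pattern, assuming the same imports these test modules already use (the compute types module, `json_format`, and `requests.Response`):

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="name_value")

response_value = Response()
response_value.status_code = 200
# compute.Operation is a proto-plus class; .pb() returns the raw protobuf
# message, which json_format.MessageToJson knows how to serialize.
json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
response_value._content = json_return_value.encode("UTF-8")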
@@ -1726,8 +1802,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1828,8 +1905,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertRegionUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1924,177 +2002,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - 
"path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2136,8 +2043,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2365,6 +2273,73 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
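Editor's note: the pruning matters because `request_type(**request_init)` hands the sample dict to a proto-plus constructor, which rejects keys the installed runtime does not define. A hedged illustration of the failure mode the pruning avoids; the key `no_such_field` is invented, and recent proto-plus versions are expected to raise ValueError here:

from google.cloud.compute_v1.types import compute

# A stale sample key that the runtime UrlMap message does not define would
# make the constructor raise before the test ever issues a request.
try:
    compute.UrlMap({"name": "name_value", "no_such_field": "value"})
except ValueError as exc:
    print(exc)  # e.g. "Unknown field for UrlMap: no_such_field"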
@@ -2398,8 +2373,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2480,8 +2456,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2576,177 +2553,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": 
"path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2788,8 +2594,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2861,8 +2668,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2952,8 +2760,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionUrlMapsRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3093,8 +2902,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3374,6 +3184,73 @@ def test_patch_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
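Editor's note: the `*_flattened` tests around this hunk assert on the issued URI with `path_template.validate` from `google.api_core`. A small, self-contained sketch of what that check does, with the host hard-coded purely for illustration:

from google.api_core import path_template

template = (
    "https://compute.googleapis.com/compute/v1/projects/{project}"
    "/regions/{region}/urlMaps/{url_map}"
)
uri = (
    "https://compute.googleapis.com/compute/v1/projects/sample1"
    "/regions/sample2/urlMaps/sample3"
)
# validate() returns True when the concrete URI matches the template's
# placeholders, which is what the flattened tests assert on args[1].
assert path_template.validate(template, uri)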
@@ -3407,8 +3284,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3513,8 +3391,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchRegionUrlMapReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3608,6 +3487,112 @@ def test_patch_rest_bad_request( transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + url_map="url_map_value", + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionUrlMapRequest(), + project="project_value", + region="region_value", + url_map="url_map_value", + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_patch_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionUrlMapRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request_init["url_map_resource"] = { @@ -3781,283 +3766,74 @@ def test_patch_rest_bad_request( } ], } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.patch(request) + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) -def test_patch_rest_flattened(): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.Operation() - - # get arguments that satisfy an http rule for this method - sample_request = { - "project": "sample1", - "region": "sample2", - "url_map": "sample3", - } - - # get truthy value for each flattened field - mock_args = dict( - project="project_value", - region="region_value", - url_map="url_map_value", - url_map_resource=compute.UrlMap( - creation_timestamp="creation_timestamp_value" - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.patch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" - % client.transport._host, - args[1], - ) - - -def test_patch_rest_flattened_error(transport: str = "rest"): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.patch( - compute.PatchRegionUrlMapRequest(), - project="project_value", - region="region_value", - url_map="url_map_value", - url_map_resource=compute.UrlMap( - creation_timestamp="creation_timestamp_value" - ), - ) - - -def test_patch_rest_error(): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - compute.PatchRegionUrlMapRequest, - dict, - ], -) -def test_patch_unary_rest(request_type): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - 
"path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } - request = request_type(**request_init) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] + request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -4090,8 +3866,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4176,8 +3953,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4204,246 +3982,75 @@ def test_patch_unary_rest_unset_required_fields(): "urlMap", "urlMapResource", ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): - transport = transports.RegionUrlMapsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RegionUrlMapsRestInterceptor(), - ) - client = RegionUrlMapsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RegionUrlMapsRestInterceptor, "post_patch" - ) as post, mock.patch.object( - transports.RegionUrlMapsRestInterceptor, "pre_patch" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.PatchRegionUrlMapRequest.pb( - compute.PatchRegionUrlMapRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.PatchRegionUrlMapRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.patch_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchRegionUrlMapRequest -): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} - request_init["url_map_resource"] = { - "creation_timestamp": 
"creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ 
- { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionUrlMapRequest.pb( + compute.PatchRegionUrlMapRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionUrlMapRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4490,8 +4097,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4720,6 +4328,73 @@ def test_update_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
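Editor's note: the re-indented `*_interceptors` tests in this file exercise the `pre_*`/`post_*` hooks on the generated REST transport. A usage-oriented sketch follows; the hook names and the transport/client wiring are taken from those tests, while the audit behaviour itself is invented for illustration:

class AuditInterceptor(transports.RegionUrlMapsRestInterceptor):
    def pre_patch(self, request, metadata):
        # Runs before the HTTP call; may rewrite the request or metadata.
        return request, list(metadata) + [("x-audit", "patch")]

    def post_patch(self, response):
        # Runs after the HTTP call; may inspect or replace the Operation.
        return response


transport = transports.RegionUrlMapsRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=AuditInterceptor(),
)
client = RegionUrlMapsClient(transport=transport)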
@@ -4753,8 +4428,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4859,8 +4535,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateRegionUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4947,186 +4624,15 @@ def test_update_rest_interceptors(null_interceptor): def test_update_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateRegionUrlMapRequest -): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - 
"redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + transport: str = "rest", request_type=compute.UpdateRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5173,8 +4679,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5403,6 +4910,73 @@ def test_update_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRegionUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
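# Illustrative sketch, not part of the generated tests: the pruning loops above drop any
# (field, subfield) pair the installed runtime library no longer defines. The same idea on
# a plain dict, with "new_subfield" standing in for a field emitted by a newer generator
# than the runtime dependency:
sample = {
    "default_url_redirect": {"host_redirect": "h", "new_subfield": "x"},
    "host_rules": [{"description": "d", "new_subfield": "x"}],
}
runtime_nested_fields = {
    ("default_url_redirect", "host_redirect"),
    ("host_rules", "description"),
}
for field, value in sample.items():
    entries = value if isinstance(value, list) else [value]
    for entry in entries:
        for subfield in list(entry):
            if (field, subfield) not in runtime_nested_fields:
                del entry[subfield]
assert sample == {
    "default_url_redirect": {"host_redirect": "h"},
    "host_rules": [{"description": "d"}],
}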
@@ -5436,8 +5010,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5522,8 +5097,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5550,246 +5126,75 @@ def test_update_unary_rest_unset_required_fields(): "urlMap", "urlMapResource", ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_unary_rest_interceptors(null_interceptor): - transport = transports.RegionUrlMapsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RegionUrlMapsRestInterceptor(), - ) - client = RegionUrlMapsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RegionUrlMapsRestInterceptor, "post_update" - ) as post, mock.patch.object( - transports.RegionUrlMapsRestInterceptor, "pre_update" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.UpdateRegionUrlMapRequest.pb( - compute.UpdateRegionUrlMapRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.UpdateRegionUrlMapRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.update_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_unary_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateRegionUrlMapRequest -): - client = RegionUrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - 
"expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - 
"expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_update" + ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "pre_update" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionUrlMapRequest.pb( + compute.UpdateRegionUrlMapRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionUrlMapRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5836,8 +5241,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6071,6 +5477,86 @@ def test_validate_rest(request_type): ], } } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ValidateRegionUrlMapRequest.meta.fields[ + "region_url_maps_validate_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_url_maps_validate_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["region_url_maps_validate_request_resource"][field] + ), + ): + del request_init["region_url_maps_validate_request_resource"][ + field + ][i][subfield] + else: + del request_init["region_url_maps_validate_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
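# Context for the recurring "Convert return value to protobuf type" hunks, given as a small
# sketch rather than a verified reference: proto-plus wrappers such as
# compute.UrlMapsValidateResponse are not protobuf messages themselves, so
# json_format.MessageToJson() needs the underlying pb2 message, which the classmethod
# Type.pb(instance) returns (the same call the tests make).
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

return_value = compute.UrlMapsValidateResponse()
pb = compute.UrlMapsValidateResponse.pb(return_value)  # underlying protobuf message
json_return_value = json_format.MessageToJson(pb)
content = json_return_value.encode("UTF-8")  # what the mocked Response._content holds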
@@ -6081,8 +5567,9 @@ def test_validate_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6165,8 +5652,9 @@ def test_validate_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6264,182 +5752,6 @@ def test_validate_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} - request_init["region_url_maps_validate_request_resource"] = { - "resource": { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": [ - "expose_headers_value1", - "expose_headers_value2", - ], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - 
{ - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6486,8 +5798,9 @@ def test_validate_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_regions.py b/tests/unit/gapic/compute_v1/test_regions.py index b87e00b4..46b409c4 100644 --- a/tests/unit/gapic/compute_v1/test_regions.py +++ b/tests/unit/gapic/compute_v1/test_regions.py @@ -566,8 +566,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Region.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -652,8 +653,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Region.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -781,8 +783,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Region.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Region.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -851,8 +854,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -938,8 +942,9 @@ def test_list_rest_required_fields(request_type=compute.ListRegionsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RegionList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1069,8 +1074,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.RegionList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RegionList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_reservations.py b/tests/unit/gapic/compute_v1/test_reservations.py index cf7f39d0..cc13e853 100644 --- a/tests/unit/gapic/compute_v1/test_reservations.py +++ b/tests/unit/gapic/compute_v1/test_reservations.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -963,8 +966,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteReservationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1207,8 +1212,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 
200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1296,8 +1302,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1381,8 +1388,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1520,8 +1528,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1598,8 +1607,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1690,8 +1700,9 @@ def test_get_rest_required_fields(request_type=compute.GetReservationRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1827,8 +1838,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1897,8 +1909,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1985,8 +1998,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2124,8 +2138,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2218,6 +2233,75 @@ def test_insert_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertReservationRequest.meta.fields["reservation_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservation_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation_resource"][field])): + del request_init["reservation_resource"][field][i][subfield] + else: + del request_init["reservation_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
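# Illustrative sketch of the flattened call shape that the nearby *_rest_flattened tests
# exercise: path parameters and the resource message are passed directly as keyword
# arguments. The exact flattened signature is inferred from those tests and not verified
# here, so the call itself is left commented out.
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.reservations import ReservationsClient
from google.cloud.compute_v1.types import compute

client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials())
reservation = compute.Reservation(name="name_value", description="description_value")
# client.insert_unary(
#     project="sample1",
#     zone="sample2",
#     reservation_resource=reservation,
# )  # would issue a real API request; shown for the argument shape only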
@@ -2251,8 +2335,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2353,8 +2438,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertReservationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2449,44 +2535,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["reservation_resource"] = { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [{"disk_size_gb": 1261, "interface": "interface_value"}], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2526,8 +2574,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2620,6 +2669,75 @@ def test_insert_unary_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertReservationRequest.meta.fields["reservation_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservation_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation_resource"][field])): + del request_init["reservation_resource"][field][i][subfield] + else: + del request_init["reservation_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2653,8 +2771,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2735,8 +2854,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2831,44 +2951,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["reservation_resource"] = { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [{"disk_size_gb": 1261, "interface": "interface_value"}], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2908,8 +2990,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2979,8 +3062,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3070,8 +3154,9 @@ def test_list_rest_required_fields(request_type=compute.ListReservationsRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3213,8 +3298,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ReservationList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3324,6 +3410,83 @@ def test_resize_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} request_init["reservations_resize_request_resource"] = {"specific_sku_count": 1920} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeReservationRequest.meta.fields[ + "reservations_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservations_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["reservations_resize_request_resource"][field]) + ): + del request_init["reservations_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["reservations_resize_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
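# Illustrative sketch, not part of the generated tests: the resize body used above is tiny,
# and proto-plus accepts it either as a dict (as the test does) or as a message instance.
# Field and parameter names are taken from the request_init dicts in these tests.
from google.cloud.compute_v1.types import compute

request = compute.ResizeReservationRequest(
    project="sample1",
    zone="sample2",
    reservation="sample3",
    reservations_resize_request_resource={"specific_sku_count": 1920},
)
assert request.reservations_resize_request_resource.specific_sku_count == 1920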
@@ -3357,8 +3520,9 @@ def test_resize_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3463,8 +3627,9 @@ def test_resize_rest_required_fields(request_type=compute.ResizeReservationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3560,7 +3725,6 @@ def test_resize_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} - request_init["reservations_resize_request_resource"] = {"specific_sku_count": 1920} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3607,8 +3771,9 @@ def test_resize_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3667,6 +3832,83 @@ def test_resize_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} request_init["reservations_resize_request_resource"] = {"specific_sku_count": 1920} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ResizeReservationRequest.meta.fields[ + "reservations_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservations_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["reservations_resize_request_resource"][field]) + ): + del request_init["reservations_resize_request_resource"][field][i][ + subfield + ] + else: + del request_init["reservations_resize_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
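Throughout this diff the intermediate `pb_return_value` name is dropped: the proto-plus return value is unwrapped in place with the generated type's `.pb()` classmethod and then serialized with `json_format.MessageToJson`, which only understands raw protobuf messages. A minimal sketch of that round-trip, assuming `google-cloud-compute` and `protobuf`; the field values are illustrative:

    # Sketch: unwrap a proto-plus message to its underlying protobuf message and
    # serialize it to JSON, mirroring the mocked-response construction above.
    from google.cloud import compute_v1 as compute
    from google.protobuf import json_format

    return_value = compute.Operation(name="operation-123")

    # compute.Operation.pb(...) returns the wrapped protobuf message.
    pb_message = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(pb_message)

    # Parsing the JSON back into a fresh protobuf instance round-trips the data.
    parsed = json_format.Parse(json_return_value, type(pb_message)())
    assert parsed.name == "operation-123"

proto-plus also exposes `compute.Operation.to_json(return_value)`, which performs the same unwrap-and-serialize internally.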
@@ -3700,8 +3942,9 @@ def test_resize_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3786,8 +4029,9 @@ def test_resize_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3883,7 +4127,6 @@ def test_resize_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} - request_init["reservations_resize_request_resource"] = {"specific_sku_count": 1920} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3930,8 +4173,9 @@ def test_resize_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4066,6 +4310,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyReservationRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4080,8 +4399,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4167,8 +4487,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4264,83 +4585,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
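Each of these hunks then feeds the serialized JSON into a hand-built `requests.Response` by assigning the private `_content` attribute, which is what the REST transport reads back when it deserializes the reply. A short sketch of that stubbing trick in isolation, assuming `requests` is available; the payload is illustrative:

    # Sketch: fake a requests.Response carrying a JSON body, the same way these
    # tests stub the HTTP layer underneath the REST transport.
    import json

    from requests import Response

    from google.cloud import compute_v1 as compute
    from google.protobuf import json_format

    return_value = compute.Policy(etag="etag_value", version=774)
    json_return_value = json_format.MessageToJson(compute.Policy.pb(return_value))

    response_value = Response()
    response_value.status_code = 200
    # _content is private, but setting it directly is enough for .content to
    # behave as if this body had arrived over the wire.
    response_value._content = json_return_value.encode("UTF-8")

    assert json.loads(response_value.content)["etag"] == "etag_value"

In the tests this fake response is returned from the patched session, so the client code under test never performs real I/O.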
@@ -4387,8 +4631,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4449,6 +4694,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsReservationRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # 
Mock the http request call within the method and fake a response. @@ -4461,8 +4781,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4546,8 +4867,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4645,9 +4967,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4694,8 +5013,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4791,6 +5111,75 @@ def test_update_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateReservationRequest.meta.fields["reservation_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservation_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation_resource"][field])): + del request_init["reservation_resource"][field][i][subfield] + else: + del request_init["reservation_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4824,8 +5213,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4936,8 +5326,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateReservationReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5039,44 +5430,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} - request_init["reservation_resource"] = { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [{"disk_size_gb": 1261, "interface": "interface_value"}], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5121,8 +5474,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5216,6 +5570,75 @@ def test_update_unary_rest(request_type): "status": "status_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateReservationRequest.meta.fields["reservation_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "reservation_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation_resource"][field])): + del request_init["reservation_resource"][field][i][subfield] + else: + del request_init["reservation_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5249,8 +5672,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5341,8 +5765,9 @@ def test_update_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5444,44 +5869,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} - request_init["reservation_resource"] = { - "commitment": "commitment_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "resource_policies": {}, - "resource_status": { - "specific_sku_allocation": { - "source_instance_template_id": "source_instance_template_id_value" - } - }, - "satisfies_pzs": True, - "self_link": "self_link_value", - "share_settings": {"project_map": {}, "share_type": "share_type_value"}, - "specific_reservation": { - "assured_count": 1407, - "count": 553, - "in_use_count": 1291, - "instance_properties": { - "guest_accelerators": [ - { - "accelerator_count": 1805, - "accelerator_type": "accelerator_type_value", - } - ], - "local_ssds": [{"disk_size_gb": 1261, "interface": "interface_value"}], - "location_hint": "location_hint_value", - "machine_type": "machine_type_value", - "min_cpu_platform": "min_cpu_platform_value", - }, - "source_instance_template": "source_instance_template_value", - }, - "specific_reservation_required": True, - "status": "status_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
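The `*_bad_request` variants now build their requests from the routing fields alone; the large resource-body dicts are gone, presumably because the mocked 400 never exercises the body and keeping the sample dict would require the same version-pruning block as above. A small sketch, using field names from the diff, showing that the request type remains valid without the body:

    # Sketch: a request constructed from routing fields only; the body
    # sub-message is simply left at its empty default.
    from google.cloud import compute_v1 as compute

    request = compute.UpdateReservationRequest(
        project="sample1", zone="sample2", reservation="sample3"
    )
    print(request.project, request.zone, request.reservation)
    # The reservation_resource field still exists on the request, just unset.
    print(request.reservation_resource.name)  # empty string by default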
@@ -5526,8 +5913,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_resource_policies.py b/tests/unit/gapic/compute_v1/test_resource_policies.py index d6d7e375..512e7ff4 100644 --- a/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -610,8 +610,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -702,8 +703,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -840,8 +842,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1000,8 +1003,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1105,8 +1109,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteResourcePolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -1248,8 +1253,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1341,8 +1347,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1426,8 +1433,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1569,8 +1577,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1648,8 +1657,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1737,8 +1747,9 @@ def test_get_rest_required_fields(request_type=compute.GetResourcePolicyRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1882,8 +1893,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1952,8 +1964,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2040,8 +2053,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2179,8 +2193,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2300,6 +2315,77 @@ def test_insert_rest(request_type): }, "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertResourcePolicyRequest.meta.fields[ + "resource_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["resource_policy_resource"][field])): + del request_init["resource_policy_resource"][field][i][subfield] + else: + del request_init["resource_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2333,8 +2419,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2435,8 +2522,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertResourcePolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2531,71 +2619,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["resource_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_consistency_group_policy": {}, - "group_placement_policy": { - "availability_domain_count": 2650, - "collocation": "collocation_value", - "vm_count": 875, - }, - "id": 205, - "instance_schedule_policy": { - "expiration_time": "expiration_time_value", - "start_time": "start_time_value", - "time_zone": "time_zone_value", - "vm_start_schedule": {"schedule": "schedule_value"}, - "vm_stop_schedule": {}, - }, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "resource_status": { - "instance_schedule_policy": { - "last_run_start_time": "last_run_start_time_value", - "next_run_start_time": "next_run_start_time_value", - } - }, - "self_link": "self_link_value", - "snapshot_schedule_policy": { - "retention_policy": { - "max_retention_days": 1933, - "on_source_disk_delete": "on_source_disk_delete_value", - }, - "schedule": { - "daily_schedule": { - "days_in_cycle": 1366, - "duration": "duration_value", - "start_time": "start_time_value", - }, - "hourly_schedule": { - "duration": "duration_value", - "hours_in_cycle": 1494, - "start_time": "start_time_value", - }, - "weekly_schedule": { - "day_of_weeks": [ - { - "day": "day_value", - "duration": "duration_value", - "start_time": "start_time_value", - } - ] - }, - }, - "snapshot_properties": { - "chain_name": "chain_name_value", - "guest_flush": True, - "labels": {}, - "storage_locations": [ - "storage_locations_value1", - "storage_locations_value2", - ], - }, - }, - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2637,8 +2660,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2760,6 +2784,77 @@ def test_insert_unary_rest(request_type): }, "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertResourcePolicyRequest.meta.fields[ + "resource_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["resource_policy_resource"][field])): + del request_init["resource_policy_resource"][field][i][subfield] + else: + del request_init["resource_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2793,8 +2888,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2875,8 +2971,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2971,71 +3068,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["resource_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_consistency_group_policy": {}, - "group_placement_policy": { - "availability_domain_count": 2650, - "collocation": "collocation_value", - "vm_count": 875, - }, - "id": 205, - "instance_schedule_policy": { - "expiration_time": "expiration_time_value", - "start_time": "start_time_value", - "time_zone": "time_zone_value", - "vm_start_schedule": {"schedule": "schedule_value"}, - "vm_stop_schedule": {}, - }, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "resource_status": { - "instance_schedule_policy": { - "last_run_start_time": "last_run_start_time_value", - "next_run_start_time": "next_run_start_time_value", - } - }, - "self_link": "self_link_value", - "snapshot_schedule_policy": { - "retention_policy": { - "max_retention_days": 1933, - "on_source_disk_delete": "on_source_disk_delete_value", - }, - "schedule": { - "daily_schedule": { - "days_in_cycle": 1366, - "duration": "duration_value", - "start_time": "start_time_value", - }, - "hourly_schedule": { - "duration": "duration_value", - "hours_in_cycle": 1494, - "start_time": "start_time_value", - }, - "weekly_schedule": { - "day_of_weeks": [ - { - "day": "day_value", - "duration": "duration_value", - "start_time": "start_time_value", - } - ] - }, - }, - "snapshot_properties": { - "chain_name": "chain_name_value", - "guest_flush": True, - "labels": {}, - "storage_locations": [ - "storage_locations_value1", - "storage_locations_value2", - ], - }, - }, - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3077,8 +3109,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3151,8 +3184,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3243,8 +3277,9 @@ def test_list_rest_required_fields(request_type=compute.ListResourcePoliciesRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3386,8 +3421,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ResourcePolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3565,6 +3601,77 @@ def test_patch_rest(request_type): }, "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchResourcePolicyRequest.meta.fields[ + "resource_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["resource_policy_resource"][field])): + del request_init["resource_policy_resource"][field][i][subfield] + else: + del request_init["resource_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3598,8 +3705,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3709,8 +3817,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchResourcePolicyRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3815,71 +3924,6 @@ def test_patch_rest_bad_request( "region": "sample2", "resource_policy": "sample3", } - request_init["resource_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_consistency_group_policy": {}, - "group_placement_policy": { - "availability_domain_count": 2650, - "collocation": "collocation_value", - "vm_count": 875, - }, - "id": 205, - "instance_schedule_policy": { - "expiration_time": "expiration_time_value", - "start_time": "start_time_value", - "time_zone": "time_zone_value", - "vm_start_schedule": {"schedule": "schedule_value"}, - "vm_stop_schedule": {}, - }, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "resource_status": { - "instance_schedule_policy": { - "last_run_start_time": "last_run_start_time_value", - "next_run_start_time": "next_run_start_time_value", - } - }, - "self_link": "self_link_value", - "snapshot_schedule_policy": { - "retention_policy": { - "max_retention_days": 1933, - "on_source_disk_delete": "on_source_disk_delete_value", - }, - "schedule": { - "daily_schedule": { - "days_in_cycle": 1366, - "duration": "duration_value", - "start_time": "start_time_value", - }, - "hourly_schedule": { - "duration": "duration_value", - "hours_in_cycle": 1494, - "start_time": "start_time_value", - }, - "weekly_schedule": { - "day_of_weeks": [ - { - "day": "day_value", - "duration": "duration_value", - "start_time": "start_time_value", - } - ] - }, - }, - "snapshot_properties": { - "chain_name": "chain_name_value", - "guest_flush": True, - "labels": {}, - "storage_locations": [ - "storage_locations_value1", - "storage_locations_value2", - ], - }, - }, - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3926,8 +3970,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4054,6 +4099,77 @@ def test_patch_unary_rest(request_type): }, "status": "status_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchResourcePolicyRequest.meta.fields[ + "resource_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "resource_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["resource_policy_resource"][field])): + del request_init["resource_policy_resource"][field][i][subfield] + else: + del request_init["resource_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4087,8 +4203,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4178,8 +4295,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4284,71 +4402,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "resource_policy": "sample3", } - request_init["resource_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_consistency_group_policy": {}, - "group_placement_policy": { - "availability_domain_count": 2650, - "collocation": "collocation_value", - "vm_count": 875, - }, - "id": 205, - "instance_schedule_policy": { - "expiration_time": "expiration_time_value", - "start_time": "start_time_value", - "time_zone": "time_zone_value", - "vm_start_schedule": {"schedule": "schedule_value"}, - "vm_stop_schedule": {}, - }, - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "resource_status": { - "instance_schedule_policy": { - "last_run_start_time": "last_run_start_time_value", - "next_run_start_time": "next_run_start_time_value", - } - }, - "self_link": "self_link_value", - "snapshot_schedule_policy": { - "retention_policy": { - "max_retention_days": 1933, - "on_source_disk_delete": "on_source_disk_delete_value", - }, - "schedule": { - "daily_schedule": { - "days_in_cycle": 1366, - "duration": "duration_value", - "start_time": "start_time_value", - }, - "hourly_schedule": { - "duration": "duration_value", - "hours_in_cycle": 1494, - "start_time": "start_time_value", - }, - "weekly_schedule": { - "day_of_weeks": [ - { - "day": "day_value", - "duration": "duration_value", - "start_time": "start_time_value", - } - ] - }, - }, - "snapshot_properties": { - "chain_name": "chain_name_value", - "guest_flush": True, - "labels": {}, - "storage_locations": [ - "storage_locations_value1", - "storage_locations_value2", - ], - }, - }, - "status": "status_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4395,8 +4448,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4531,6 +4585,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyResourcePolicyRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
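For `region_set_policy_request_resource`, the `bindings` field is the case that exercises the repeated branch: it is a repeated message in the sample request, so `is_repeated` is recorded and the unknown subfield is stripped from every list element rather than from a single nested dict. In isolation (`newer_subfield` is a hypothetical name):

    bindings = [
        {"role": "role_value", "newer_subfield": 1},
        {"role": "role_value", "newer_subfield": 2},
    ]
    # The repeated-field branch of the deletion loop:
    for i in range(0, len(bindings)):
        del bindings[i]["newer_subfield"]
    assert all("newer_subfield" not in binding for binding in bindings)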
@@ -4545,8 +4674,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4632,8 +4762,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4729,83 +4860,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4852,8 +4906,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4914,6 +4969,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsResourcePolicyRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. @@ -4926,8 +5056,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5011,8 +5142,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5111,9 +5243,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5160,8 +5289,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_routers.py b/tests/unit/gapic/compute_v1/test_routers.py index b8c780cb..0443c6e0 100644 --- a/tests/unit/gapic/compute_v1/test_routers.py +++ b/tests/unit/gapic/compute_v1/test_routers.py @@ -569,8 +569,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -660,8 +661,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -796,8 +798,9 @@ def 
test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -947,8 +950,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1052,8 +1056,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRouterRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1187,8 +1192,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1276,8 +1282,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1359,8 +1366,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouterRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1494,8 +1502,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1570,8 +1579,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Router.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1660,8 +1670,9 @@ def test_get_rest_required_fields(request_type=compute.GetRouterRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Router.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1795,8 +1806,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Router.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1866,8 +1878,9 @@ def test_get_nat_mapping_info_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1964,8 +1977,9 @@ def test_get_nat_mapping_info_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2112,8 +2126,9 @@ def test_get_nat_mapping_info_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2239,8 +2254,9 @@ def test_get_router_status_rest(request_type): # Wrap the value into a proper 
Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,8 +2339,9 @@ def test_get_router_status_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2462,8 +2479,9 @@ def test_get_router_status_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2636,6 +2654,73 @@ def test_insert_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
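The `DESCRIPTOR` check in `get_message_fields` is how the test distinguishes proto-plus composites from vanilla `*_pb2` messages: proto-plus classes expose their fields through `meta.fields`, while generated protobuf classes expose them through `DESCRIPTOR.fields`. A quick check against the type used in this test, assuming google-cloud-compute is installed:

    from google.cloud.compute_v1.types import compute

    field = compute.InsertRouterRequest.meta.fields["router_resource"]
    # compute.Router is a proto-plus type, so field.message has no DESCRIPTOR
    assert not hasattr(field.message, "DESCRIPTOR")
    print([f.name for f in field.message.meta.fields.values()][:5])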
@@ -2669,8 +2754,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2771,8 +2857,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertRouterRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2863,124 +2950,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": ["endpoint_types_value1", "endpoint_types_value2"], - 
"icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3022,8 +2991,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3198,6 +3168,73 @@ def test_insert_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3231,8 +3268,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3311,8 +3349,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertRouterRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3403,124 +3442,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": ["endpoint_types_value1", 
"endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3562,8 +3483,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3635,8 +3557,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3726,8 +3649,9 @@ def test_list_rest_required_fields(request_type=compute.ListRoutersRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3863,8 +3787,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouterList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4091,6 +4016,73 @@ def 
test_patch_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4124,8 +4116,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4230,8 +4223,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchRouterRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4323,124 +4317,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": ["endpoint_types_value1", 
"endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4487,8 +4363,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4664,6 +4541,73 @@ def test_patch_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4697,8 +4641,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4781,8 +4726,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchRouterReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4874,124 +4820,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": 
["endpoint_types_value1", "endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5038,8 +4866,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5215,6 +5044,73 @@ def test_preview_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PreviewRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5225,8 +5121,9 @@ def test_preview_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RoutersPreviewResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RoutersPreviewResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5307,8 +5204,9 @@ def test_preview_rest_required_fields(request_type=compute.PreviewRouterRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RoutersPreviewResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RoutersPreviewResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5392,134 +5290,16 @@ def test_preview_rest_interceptors(null_interceptor): post.assert_called_once() -def test_preview_rest_bad_request( - transport: str = "rest", request_type=compute.PreviewRouterRequest -): - client = RoutersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", 
"name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": ["endpoint_types_value1", "endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } +def test_preview_rest_bad_request( + transport: str = "rest", request_type=compute.PreviewRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5566,8 +5346,9 @@ def test_preview_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RoutersPreviewResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RoutersPreviewResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5743,6 +5524,73 @@ def test_update_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5776,8 +5624,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5882,8 +5731,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateRouterRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5975,124 +5825,6 @@ def test_update_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": ["endpoint_types_value1", 
"endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6139,8 +5871,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6316,6 +6049,73 @@ def test_update_unary_rest(request_type): "region": "region_value", "self_link": "self_link_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateRouterRequest.meta.fields["router_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["router_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["router_resource"][field])): + del request_init["router_resource"][field][i][subfield] + else: + del request_init["router_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6349,8 +6149,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6433,8 +6234,9 @@ def test_update_unary_rest_required_fields(request_type=compute.UpdateRouterRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6526,124 +6328,6 @@ def test_update_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} - request_init["router_resource"] = { - "bgp": { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": [ - {"description": "description_value", "range_": "range__value"} - ], - "asn": 322, - "keepalive_interval": 1914, - }, - "bgp_peers": [ - { - "advertise_mode": "advertise_mode_value", - "advertised_groups": [ - "advertised_groups_value1", - "advertised_groups_value2", - ], - "advertised_ip_ranges": {}, - "advertised_route_priority": 2714, - "bfd": { - "min_receive_interval": 2122, - "min_transmit_interval": 2265, - "multiplier": 1095, - "session_initialization_mode": "session_initialization_mode_value", - }, - "custom_learned_ip_ranges": [{"range_": "range__value"}], - "custom_learned_route_priority": 3140, - "enable": "enable_value", - "enable_ipv6": True, - "interface_name": "interface_name_value", - "ip_address": "ip_address_value", - "ipv6_nexthop_address": "ipv6_nexthop_address_value", - "management_type": "management_type_value", - "md5_authentication_key_name": "md5_authentication_key_name_value", - "name": "name_value", - "peer_asn": 845, - "peer_ip_address": "peer_ip_address_value", - "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", - "router_appliance_instance": "router_appliance_instance_value", - } - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "encrypted_interconnect_router": True, - "id": 205, - "interfaces": [ - { - "ip_range": "ip_range_value", - "linked_interconnect_attachment": "linked_interconnect_attachment_value", - "linked_vpn_tunnel": "linked_vpn_tunnel_value", - "management_type": "management_type_value", - "name": "name_value", - "private_ip_address": "private_ip_address_value", - "redundant_interface": "redundant_interface_value", - "subnetwork": "subnetwork_value", - } - ], - "kind": "kind_value", - "md5_authentication_keys": [{"key": "key_value", "name": "name_value"}], - "name": "name_value", - "nats": [ - { - "auto_network_tier": "auto_network_tier_value", - "drain_nat_ips": ["drain_nat_ips_value1", "drain_nat_ips_value2"], - "enable_dynamic_port_allocation": True, - "enable_endpoint_independent_mapping": True, - "endpoint_types": 
["endpoint_types_value1", "endpoint_types_value2"], - "icmp_idle_timeout_sec": 2214, - "log_config": {"enable": True, "filter": "filter_value"}, - "max_ports_per_vm": 1733, - "min_ports_per_vm": 1731, - "name": "name_value", - "nat_ip_allocate_option": "nat_ip_allocate_option_value", - "nat_ips": ["nat_ips_value1", "nat_ips_value2"], - "rules": [ - { - "action": { - "source_nat_active_ips": [ - "source_nat_active_ips_value1", - "source_nat_active_ips_value2", - ], - "source_nat_drain_ips": [ - "source_nat_drain_ips_value1", - "source_nat_drain_ips_value2", - ], - }, - "description": "description_value", - "match": "match_value", - "rule_number": 1184, - } - ], - "source_subnetwork_ip_ranges_to_nat": "source_subnetwork_ip_ranges_to_nat_value", - "subnetworks": [ - { - "name": "name_value", - "secondary_ip_range_names": [ - "secondary_ip_range_names_value1", - "secondary_ip_range_names_value2", - ], - "source_ip_ranges_to_nat": [ - "source_ip_ranges_to_nat_value1", - "source_ip_ranges_to_nat_value2", - ], - } - ], - "tcp_established_idle_timeout_sec": 3371, - "tcp_time_wait_timeout_sec": 2665, - "tcp_transitory_idle_timeout_sec": 3330, - "udp_idle_timeout_sec": 2118, - } - ], - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6690,8 +6374,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_routes.py b/tests/unit/gapic/compute_v1/test_routes.py index 1f3277fa..d9957502 100644 --- a/tests/unit/gapic/compute_v1/test_routes.py +++ b/tests/unit/gapic/compute_v1/test_routes.py @@ -586,8 +586,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -687,8 +688,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteRouteRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -816,8 +818,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type 
+ return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -904,8 +907,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -983,8 +987,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouteReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1112,8 +1117,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1198,8 +1204,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Route.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1295,8 +1302,9 @@ def test_get_rest_required_fields(request_type=compute.GetRouteRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Route.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1424,8 +1432,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Route.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1511,6 +1520,73 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRouteRequest.meta.fields["route_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["route_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["route_resource"][field])): + del request_init["route_resource"][field][i][subfield] + else: + del request_init["route_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1544,8 +1620,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1642,8 +1719,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertRouteRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1733,38 +1811,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["route_resource"] = { - "as_paths": [ - {"as_lists": [867, 868], "path_segment_type": "path_segment_type_value"} - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dest_range": "dest_range_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "next_hop_gateway": "next_hop_gateway_value", - "next_hop_hub": "next_hop_hub_value", - "next_hop_ilb": "next_hop_ilb_value", - "next_hop_instance": "next_hop_instance_value", - "next_hop_ip": "next_hop_ip_value", - "next_hop_network": "next_hop_network_value", - "next_hop_peering": "next_hop_peering_value", - "next_hop_vpn_tunnel": "next_hop_vpn_tunnel_value", - "priority": 898, - "route_status": "route_status_value", - "route_type": "route_type_value", - "self_link": "self_link_value", - "tags": ["tags_value1", "tags_value2"], - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1805,8 +1851,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1893,6 +1940,73 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRouteRequest.meta.fields["route_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["route_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["route_resource"][field])): + del request_init["route_resource"][field][i][subfield] + else: + del request_init["route_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1926,8 +2040,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2002,8 +2117,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertRouteReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2093,38 +2209,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["route_resource"] = { - "as_paths": [ - {"as_lists": [867, 868], "path_segment_type": "path_segment_type_value"} - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dest_range": "dest_range_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "next_hop_gateway": "next_hop_gateway_value", - "next_hop_hub": "next_hop_hub_value", - "next_hop_ilb": "next_hop_ilb_value", - "next_hop_instance": "next_hop_instance_value", - "next_hop_ip": "next_hop_ip_value", - "next_hop_network": "next_hop_network_value", - "next_hop_peering": "next_hop_peering_value", - "next_hop_vpn_tunnel": "next_hop_vpn_tunnel_value", - "priority": 898, - "route_status": "route_status_value", - "route_type": "route_type_value", - "self_link": "self_link_value", - "tags": ["tags_value1", "tags_value2"], - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2165,8 +2249,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2236,8 +2321,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouteList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,8 +2409,9 @@ def test_list_rest_required_fields(request_type=compute.ListRoutesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouteList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2454,8 +2541,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.RouteList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_security_policies.py b/tests/unit/gapic/compute_v1/test_security_policies.py index 4308494b..f125fb8d 100644 --- a/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/tests/unit/gapic/compute_v1/test_security_policies.py @@ -657,6 +657,81 @@ def test_add_rule_rest(request_type): }, "redirect_options": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -690,8 +765,9 @@ def test_add_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -794,8 +870,9 @@ def test_add_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -890,70 +967,6 @@ def test_add_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -995,8 +1008,9 @@ def test_add_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1117,6 +1131,81 @@ def test_add_rule_unary_rest(request_type): }, "redirect_options": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1150,8 +1239,9 @@ def test_add_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1232,8 +1322,9 @@ def test_add_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1328,70 +1419,6 @@ def test_add_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1433,8 +1460,9 @@ def test_add_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1508,8 +1536,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1600,8 +1629,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1738,8 +1768,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1894,8 +1925,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1995,8 +2027,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteSecurityPolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2128,8 +2161,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2216,8 +2250,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2297,8 +2332,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2430,8 +2466,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2506,8 +2543,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2593,8 +2631,9 @@ def test_get_rest_required_fields(request_type=compute.GetSecurityPolicyRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2728,8 +2767,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2799,8 +2839,9 @@ def test_get_rule_rest(request_type): # Wrap 
the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2885,8 +2926,9 @@ def test_get_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3020,8 +3062,9 @@ def test_get_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyRule.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3172,6 +3215,77 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
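The second half of each added block then walks the sample request_init dict and deletes any sub-key that is not in that allow-list. A simplified, self-contained variant of the pruning step (unlike the generated code it prunes every element of a repeated field, not just the first, and the sample dict and allowed pairs below are invented purely for illustration):

def prune_unknown_subfields(resource, runtime_nested_fields):
    # Drop (field, subfield) combinations that the runtime version of the
    # dependency does not know about, mirroring the generated test logic.
    for field, value in resource.items():
        items = value if isinstance(value, list) else [value]
        for item in items:
            if isinstance(item, dict):
                for subfield in list(item.keys()):
                    if (field, subfield) not in runtime_nested_fields:
                        del item[subfield]
    return resource

sample = {
    "match": {"versioned_expr": "versioned_expr_value", "legacy_only": "x"},
    "rules": [{"action": "allow", "retired_subfield": 1}],
}
allowed = {("match", "versioned_expr"), ("rules", "action")}
print(prune_unknown_subfields(sample, allowed))
# -> {'match': {'versioned_expr': 'versioned_expr_value'}, 'rules': [{'action': 'allow'}]}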
@@ -3205,8 +3319,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3308,8 +3423,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertSecurityPolicyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3408,103 +3524,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": 
"type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3549,8 +3568,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3707,6 +3727,77 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del 
request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3740,8 +3831,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3823,8 +3915,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3923,103 +4016,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - 
"enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4064,8 +4060,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4139,8 +4136,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4225,8 +4223,9 @@ def test_list_rest_required_fields(request_type=compute.ListSecurityPoliciesRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4362,8 +4361,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SecurityPolicyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4481,12 +4481,13 @@ def test_list_preconfigured_expression_sets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4572,12 +4573,13 @@ def test_list_preconfigured_expression_sets_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( 
compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4722,12 +4724,13 @@ def test_list_preconfigured_expression_sets_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4879,6 +4882,77 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4912,8 +4986,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5014,8 +5089,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchSecurityPolicyRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5110,103 +5186,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": 
"enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5252,8 +5231,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5411,6 +5391,77 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5444,8 +5495,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5526,8 +5578,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5622,103 +5675,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": 
"type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5764,8 +5720,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5890,6 +5847,81 @@ def test_patch_rule_rest(request_type): }, "redirect_options": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del 
request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5923,8 +5955,9 @@ def test_patch_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6032,8 +6065,9 @@ def test_patch_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6133,70 +6167,6 @@ def test_patch_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6238,8 +6208,9 @@ def test_patch_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6360,6 +6331,81 @@ def test_patch_rule_unary_rest(request_type): }, "redirect_options": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6393,8 +6439,9 @@ def test_patch_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6480,8 +6527,9 @@ def test_patch_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6581,70 +6629,6 @@ def test_patch_rule_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "security_policy": "sample2"} - request_init["security_policy_rule_resource"] = { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6686,8 +6670,9 @@ def test_patch_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6777,8 +6762,9 @@ def test_remove_rule_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6880,8 +6866,9 @@ def test_remove_rule_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7013,8 +7000,9 @@ def test_remove_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7101,8 +7089,9 @@ def test_remove_rule_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7182,8 +7171,9 @@ def test_remove_rule_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7315,8 +7305,9 @@ def test_remove_rule_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7374,6 +7365,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsSecurityPolicyRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
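The block added above for test_set_labels_rest (and repeated for the other request-body tests in this diff) exists because the proto-plus/protobuf dependency installed at test time may define fewer nested fields than the version the sample request was generated against. A self-contained sketch of the same idea written as a reusable helper; the helper name, the per-entry deletion loop, and the final request construction are illustrative rather than part of the generated tests:

from google.cloud.compute_v1.types import compute

def strip_unknown_subfields(request_cls, field_name, request_init):
    # Drop nested keys under request_init[field_name] that the runtime
    # message type does not define (same intent as the generated block).
    top_field = request_cls.meta.fields[field_name]

    def message_fields(field):
        # Fields of a message-typed field, covering proto-plus types
        # (no DESCRIPTOR attribute) and raw protobuf types.
        if hasattr(field, "message") and field.message:
            if not hasattr(field.message, "DESCRIPTOR"):
                return list(field.message.meta.fields.values())
            return list(field.message.DESCRIPTOR.fields)
        return []

    known = {
        (field.name, nested.name)
        for field in message_fields(top_field)
        for nested in message_fields(field)
    }

    for field, value in request_init[field_name].items():
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            if isinstance(entry, dict):
                for subfield in list(entry):
                    if (field, subfield) not in known:
                        del entry[subfield]

# Example: sanitize the SetLabels sample body, then build the request.
request_init = {
    "global_set_labels_request_resource": {
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
    }
}
strip_unknown_subfields(
    compute.SetLabelsSecurityPolicyRequest,
    "global_set_labels_request_resource",
    request_init,
)
request = compute.SetLabelsSecurityPolicyRequest(**request_init)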
@@ -7407,8 +7473,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7509,8 +7576,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7605,10 +7673,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7650,8 +7714,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7712,6 +7777,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsSecurityPolicyRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
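Every REST test touched in this diff relies on the transport-mocking recipe the next hunk shows again: construct a requests.Response by hand, attach the JSON-encoded protobuf as its body, and return it from a stubbed session so no real HTTP call is made. A rough sketch of that recipe under the same assumptions as above; the mock.Mock() session and the URL are stand-ins for the patched transport session used by the generated tests:

from unittest import mock

from requests import Response
from google.protobuf import json_format
from google.cloud.compute_v1.types import compute

# Fake a successful REST response carrying an Operation payload.
return_value = compute.Operation(name="operation-1")
response_value = Response()
response_value.status_code = 200
json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
response_value._content = json_return_value.encode("UTF-8")

# Stub the session so .request() hands back the canned response.
session = mock.Mock()
session.request.return_value = response_value

resp = session.request("POST", "https://compute.example/setLabels")
assert resp.status_code == 200
assert resp.json()["name"] == "operation-1"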
@@ -7745,8 +7885,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7825,8 +7966,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7921,10 +8063,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7966,8 +8104,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_service_attachments.py b/tests/unit/gapic/compute_v1/test_service_attachments.py index 074f6b74..4dcc1f5a 100644 --- a/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -839,8 +841,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -999,8 +1002,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1106,8 +1110,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1249,8 +1254,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1342,8 +1348,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1427,8 +1434,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1570,8 +1578,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -1657,8 +1666,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1754,8 +1764,9 @@ def test_get_rest_required_fields(request_type=compute.GetServiceAttachmentReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1899,8 +1910,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1969,8 +1981,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2057,8 +2070,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2196,8 +2210,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2289,6 +2304,79 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "target_service": "target_service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertServiceAttachmentRequest.meta.fields[ + "service_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "service_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["service_attachment_resource"][field]) + ): + del request_init["service_attachment_resource"][field][i][subfield] + else: + del request_init["service_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2322,8 +2410,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2426,8 +2515,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2522,43 +2612,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["service_attachment_resource"] = { - "connected_endpoints": [ - { - "consumer_network": "consumer_network_value", - "endpoint": "endpoint_value", - "psc_connection_id": 1793, - "status": "status_value", - } - ], - "connection_preference": "connection_preference_value", - "consumer_accept_lists": [ - { - "connection_limit": 1710, - "network_url": "network_url_value", - "project_id_or_num": "project_id_or_num_value", - } - ], - "consumer_reject_lists": [ - "consumer_reject_lists_value1", - "consumer_reject_lists_value2", - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "domain_names": ["domain_names_value1", "domain_names_value2"], - "enable_proxy_protocol": True, - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], - "producer_forwarding_rule": "producer_forwarding_rule_value", - "psc_service_attachment_id": {"high": 416, "low": 338}, - "reconcile_connections": True, - "region": "region_value", - "self_link": "self_link_value", - "target_service": "target_service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2604,8 +2657,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2703,6 +2757,79 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "target_service": "target_service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertServiceAttachmentRequest.meta.fields[ + "service_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "service_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["service_attachment_resource"][field]) + ): + del request_init["service_attachment_resource"][field][i][subfield] + else: + del request_init["service_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2736,8 +2863,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2818,8 +2946,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2914,43 +3043,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["service_attachment_resource"] = { - "connected_endpoints": [ - { - "consumer_network": "consumer_network_value", - "endpoint": "endpoint_value", - "psc_connection_id": 1793, - "status": "status_value", - } - ], - "connection_preference": "connection_preference_value", - "consumer_accept_lists": [ - { - "connection_limit": 1710, - "network_url": "network_url_value", - "project_id_or_num": "project_id_or_num_value", - } - ], - "consumer_reject_lists": [ - "consumer_reject_lists_value1", - "consumer_reject_lists_value2", - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "domain_names": ["domain_names_value1", "domain_names_value2"], - "enable_proxy_protocol": True, - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], - "producer_forwarding_rule": "producer_forwarding_rule_value", - "psc_service_attachment_id": {"high": 416, "low": 338}, - "reconcile_connections": True, - "region": "region_value", - "self_link": "self_link_value", - "target_service": "target_service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2996,8 +3088,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3073,8 +3166,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3164,8 +3258,9 @@ def test_list_rest_required_fields(request_type=compute.ListServiceAttachmentsRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3307,8 +3402,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.ServiceAttachmentList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.ServiceAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3458,6 +3554,79 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "target_service": "target_service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchServiceAttachmentRequest.meta.fields[ + "service_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "service_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["service_attachment_resource"][field]) + ): + del request_init["service_attachment_resource"][field][i][subfield] + else: + del request_init["service_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3491,8 +3660,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3597,8 +3767,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchServiceAttachmentR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3698,43 +3869,6 @@ def test_patch_rest_bad_request( "region": "sample2", "service_attachment": "sample3", } - request_init["service_attachment_resource"] = { - "connected_endpoints": [ - { - "consumer_network": "consumer_network_value", - "endpoint": "endpoint_value", - "psc_connection_id": 1793, - "status": "status_value", - } - ], - "connection_preference": "connection_preference_value", - "consumer_accept_lists": [ - { - "connection_limit": 1710, - "network_url": "network_url_value", - "project_id_or_num": "project_id_or_num_value", - } - ], - "consumer_reject_lists": [ - "consumer_reject_lists_value1", - "consumer_reject_lists_value2", - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "domain_names": ["domain_names_value1", "domain_names_value2"], - "enable_proxy_protocol": True, - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], - "producer_forwarding_rule": "producer_forwarding_rule_value", - "psc_service_attachment_id": {"high": 416, "low": 338}, - "reconcile_connections": True, - "region": "region_value", - "self_link": "self_link_value", - "target_service": "target_service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3785,8 +3919,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3889,6 +4024,79 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "target_service": "target_service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchServiceAttachmentRequest.meta.fields[ + "service_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "service_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["service_attachment_resource"][field]) + ): + del request_init["service_attachment_resource"][field][i][subfield] + else: + del request_init["service_attachment_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3922,8 +4130,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4008,8 +4217,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4109,43 +4319,6 @@ def test_patch_unary_rest_bad_request( "region": "sample2", "service_attachment": "sample3", } - request_init["service_attachment_resource"] = { - "connected_endpoints": [ - { - "consumer_network": "consumer_network_value", - "endpoint": "endpoint_value", - "psc_connection_id": 1793, - "status": "status_value", - } - ], - "connection_preference": "connection_preference_value", - "consumer_accept_lists": [ - { - "connection_limit": 1710, - "network_url": "network_url_value", - "project_id_or_num": "project_id_or_num_value", - } - ], - "consumer_reject_lists": [ - "consumer_reject_lists_value1", - "consumer_reject_lists_value2", - ], - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "domain_names": ["domain_names_value1", "domain_names_value2"], - "enable_proxy_protocol": True, - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], - "producer_forwarding_rule": "producer_forwarding_rule_value", - "psc_service_attachment_id": {"high": 416, "low": 338}, - "reconcile_connections": True, - "region": "region_value", - "self_link": "self_link_value", - "target_service": "target_service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4196,8 +4369,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4336,6 +4510,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyServiceAttachmentRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4350,8 +4599,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4437,8 +4687,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4534,83 +4785,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4657,8 +4831,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4719,6 +4894,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsServiceAttachmentRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4731,8 +4981,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4816,8 +5067,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4916,9 +5168,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4965,8 +5214,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_snapshots.py b/tests/unit/gapic/compute_v1/test_snapshots.py index 82ddbe0b..d16d61b4 100644 --- a/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/tests/unit/gapic/compute_v1/test_snapshots.py @@ -593,8 +593,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -694,8 +695,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteSnapshotRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -823,8 +825,9 @@ def 
test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -911,8 +914,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -990,8 +994,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteSnapshotRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1119,8 +1124,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1211,8 +1217,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1320,8 +1327,9 @@ def test_get_rest_required_fields(request_type=compute.GetSnapshotRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1449,8 +1457,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1518,8 +1527,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1602,8 +1612,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1733,8 +1744,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1825,6 +1837,73 @@ def test_insert_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSnapshotRequest.meta.fields["snapshot_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
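The rename above (return_value = compute.Operation.pb(return_value)) is the heart of every mocked response in this diff. A standalone sketch of that pattern, with an illustrative Operation name and no test harness around it: a proto-plus message is converted to its underlying protobuf type with .pb(), serialized to JSON, and stuffed into a fake requests.Response for the REST transport to parse.

from google.cloud import compute_v1
from google.protobuf import json_format
from requests import Response

# Build a proto-plus return value (the field value is illustrative).
return_value = compute_v1.Operation(name="operation-1")

# proto-plus wrapper -> underlying protobuf message; the diff reuses the same
# variable name instead of introducing a separate pb_return_value.
return_value = compute_v1.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

# Canned HTTP response the patched session will hand back to the client.
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")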
@@ -1858,8 +1937,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1956,8 +2036,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertSnapshotRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2047,43 +2128,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2122,8 +2166,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2214,6 +2259,73 @@ def test_insert_unary_rest(request_type): "storage_bytes_status": "storage_bytes_status_value", "storage_locations": ["storage_locations_value1", "storage_locations_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSnapshotRequest.meta.fields["snapshot_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + else: + del request_init["snapshot_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
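The get_message_fields helper that both insert tests above define is easier to read outside the diff. Below is a hedged, runnable restatement, probed against the same InsertSnapshotRequest field the tests inspect; the example field names in the final comment are taken from the sample request above.

from google.cloud import compute_v1


def get_message_fields(field):
    # Return the fields of a message-typed field, or [] for scalar fields.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            # proto-plus wrapper: its fields live in .meta.fields
            return list(field.message.meta.fields.values())
        # raw protobuf (*_pb2) message: its fields live in .DESCRIPTOR.fields
        return list(field.message.DESCRIPTOR.fields)
    return []


test_field = compute_v1.InsertSnapshotRequest.meta.fields["snapshot_resource"]
nested_names = [f.name for f in get_message_fields(test_field)]
# nested_names lists whatever Snapshot fields the *installed* library defines,
# e.g. "snapshot_encryption_key" and "storage_locations".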
@@ -2247,8 +2359,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,8 +2436,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertSnapshotRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2414,43 +2528,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["snapshot_resource"] = { - "architecture": "architecture_value", - "auto_created": True, - "chain_name": "chain_name_value", - "creation_size_bytes": 2037, - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "disk_size_gb": 1261, - "download_bytes": 1502, - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "license_codes": [1361, 1362], - "licenses": ["licenses_value1", "licenses_value2"], - "location_hint": "location_hint_value", - "name": "name_value", - "satisfies_pzs": True, - "self_link": "self_link_value", - "snapshot_encryption_key": { - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "sha256": "sha256_value", - }, - "snapshot_type": "snapshot_type_value", - "source_disk": "source_disk_value", - "source_disk_encryption_key": {}, - "source_disk_id": "source_disk_id_value", - "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", - "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", - "status": "status_value", - "storage_bytes": 1403, - "storage_bytes_status": "storage_bytes_status_value", - "storage_locations": ["storage_locations_value1", "storage_locations_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2489,8 +2566,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2559,8 +2637,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SnapshotList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2646,8 +2725,9 @@ def test_list_rest_required_fields(request_type=compute.ListSnapshotsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SnapshotList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2777,8 +2857,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SnapshotList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2963,6 +3044,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicySnapshotRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
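Condensed into a toy, self-contained form, the pruning loop above does the following. The runtime_nested_fields list is hypothetical here (in the tests it is computed from the installed library), and the sample values come from the set_iam_policy request above.

request_init = {
    "global_set_policy_request_resource": {
        "bindings": [{"role": "role_value", "binding_id": "binding_id_value"}],
        "etag": "etag_value",
    }
}
# Pretend the installed library only knows bindings.role, i.e. binding_id was
# added to the protos after this library version was released.
runtime_nested_fields = [("bindings", "role")]

resource = request_init["global_set_policy_request_resource"]
for field, value in resource.items():
    sample = value[0] if isinstance(value, list) and value else value
    if isinstance(sample, dict):
        for subfield in list(sample.keys()):
            if (field, subfield) not in runtime_nested_fields:
                # Drop the unknown subfield from every element of a repeated
                # field, or from the single nested dict of a singular field.
                for item in (value if isinstance(value, list) else [value]):
                    item.pop(subfield, None)

# resource is now {"bindings": [{"role": "role_value"}], "etag": "etag_value"},
# which the runtime version of the library can construct without complaint.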
@@ -2977,8 +3133,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3060,8 +3217,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3154,83 +3312,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3272,8 +3353,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3334,6 +3416,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsSnapshotRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3367,8 +3524,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3467,8 +3625,9 @@ def test_set_labels_rest_required_fields(request_type=compute.SetLabelsSnapshotR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3561,10 +3720,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3606,8 +3761,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3668,6 +3824,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsSnapshotRequest.meta.fields[ + "global_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_labels_request_resource"][field]) + ): + del request_init["global_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
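For reference, the canned bodies produced by json_format.MessageToJson above follow the protobuf JSON mapping, which renders field names in lowerCamelCase by default. A minimal sketch using a Snapshot field from the sample requests in this file:

from google.cloud import compute_v1
from google.protobuf import json_format

msg = compute_v1.Snapshot(label_fingerprint="label_fingerprint_value")
print(json_format.MessageToJson(compute_v1.Snapshot.pb(msg)))
# {
#   "labelFingerprint": "label_fingerprint_value"
# }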
@@ -3701,8 +3932,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3781,8 +4013,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3875,10 +4108,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3920,8 +4149,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3981,6 +4211,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsSnapshotRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
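Putting the pieces together, here is a condensed, hedged sketch of how one of these REST unit tests is wired; the project/snapshot values are the usual placeholders, and the patched attribute follows the pattern the generated tests use.

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from google.protobuf import json_format
from requests import Response


def test_get_snapshot_rest_sketch():
    client = compute_v1.SnapshotsClient(
        credentials=ga_credentials.AnonymousCredentials()
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        return_value = compute_v1.Snapshot(name="name_value")

        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type before serializing, as above.
        response_value._content = json_format.MessageToJson(
            compute_v1.Snapshot.pb(return_value)
        ).encode("UTF-8")
        req.return_value = response_value

        snapshot = client.get(project="sample1", snapshot="sample2")

    assert snapshot.name == "name_value"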
@@ -3993,8 +4298,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4074,8 +4380,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4170,9 +4477,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "resource": "sample2"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4214,8 +4518,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/tests/unit/gapic/compute_v1/test_ssl_certificates.py index c4d451e1..7c29bd00 100644 --- a/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -603,8 +603,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -694,8 +695,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -832,8 +834,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the 
value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -988,8 +991,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1089,8 +1093,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteSslCertificateRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1222,8 +1227,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1310,8 +1316,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1391,8 +1398,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1524,8 +1532,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1602,8 +1611,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1691,8 +1701,9 @@ def test_get_rest_required_fields(request_type=compute.GetSslCertificateRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1826,8 +1837,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1907,6 +1919,77 @@ def test_insert_rest(request_type): ], "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSslCertificateRequest.meta.fields[ + "ssl_certificate_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_certificate_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_certificate_resource"][field])): + del request_init["ssl_certificate_resource"][field][i][subfield] + else: + del request_init["ssl_certificate_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1940,8 +2023,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2038,8 +2122,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertSslCertificateRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2133,32 +2218,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["ssl_certificate_resource"] = { - "certificate": "certificate_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "expire_time": "expire_time_value", - "id": 205, - "kind": "kind_value", - "managed": { - "domain_status": {}, - "domains": ["domains_value1", "domains_value2"], - "status": "status_value", - }, - "name": "name_value", - "private_key": "private_key_value", - "region": "region_value", - "self_link": "self_link_value", - "self_managed": { - "certificate": "certificate_value", - "private_key": "private_key_value", - }, - "subject_alternative_names": [ - "subject_alternative_names_value1", - "subject_alternative_names_value2", - ], - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2199,8 +2258,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2282,6 +2342,77 @@ def test_insert_unary_rest(request_type): ], "type_": "type__value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSslCertificateRequest.meta.fields[ + "ssl_certificate_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_certificate_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_certificate_resource"][field])): + del request_init["ssl_certificate_resource"][field][i][subfield] + else: + del request_init["ssl_certificate_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2315,8 +2446,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2393,8 +2525,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2488,32 +2621,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["ssl_certificate_resource"] = { - "certificate": "certificate_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "expire_time": "expire_time_value", - "id": 205, - "kind": "kind_value", - "managed": { - "domain_status": {}, - "domains": ["domains_value1", "domains_value2"], - "status": "status_value", - }, - "name": "name_value", - "private_key": "private_key_value", - "region": "region_value", - "self_link": "self_link_value", - "self_managed": { - "certificate": "certificate_value", - "private_key": "private_key_value", - }, - "subject_alternative_names": [ - "subject_alternative_names_value1", - "subject_alternative_names_value2", - ], - "type_": "type__value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
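A hedged reading of why the bad_request hunks above simply delete the resource dicts: those tests only need enough of the request to satisfy URL transcoding, and a request built from the path parameter alone is still a valid message with an empty body. For example:

from google.cloud import compute_v1

request = compute_v1.InsertSslCertificateRequest(project="sample1")
assert request.project == "sample1"
# The nested resource simply defaults to an empty SslCertificate.
assert request.ssl_certificate_resource.certificate == ""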
@@ -2554,8 +2661,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2626,8 +2734,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2713,8 +2822,9 @@ def test_list_rest_required_fields(request_type=compute.ListSslCertificatesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2850,8 +2960,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslCertificateList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_ssl_policies.py b/tests/unit/gapic/compute_v1/test_ssl_policies.py index 22c6a073..a52395cc 100644 --- a/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -581,8 +581,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -673,8 +674,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -811,8 
+813,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -965,8 +968,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1066,8 +1070,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteSslPolicyRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1197,8 +1202,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1285,8 +1291,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1364,8 +1371,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteSslPolicyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1495,8 +1503,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf 
type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1573,8 +1582,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1662,8 +1672,9 @@ def test_get_rest_required_fields(request_type=compute.GetSslPolicyRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1793,8 +1804,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1869,6 +1881,73 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
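Note: get_message_fields (defined in the block above) has to cope with two kinds of nested message types, because a field's type may resolve either to another proto-plus wrapper, whose fields are listed under .meta.fields, or to a raw *_pb2 class, whose fields hang off DESCRIPTOR.fields; the hasattr(field.message, "DESCRIPTOR") probe is how the generated code tells them apart. A short illustration of the two introspection paths, assuming the proto-plus and protobuf packages; Widget is an invented message used only for this example.

    import proto
    from google.protobuf import duration_pb2


    class Widget(proto.Message):
        # Invented proto-plus message, for illustration only.
        name = proto.Field(proto.STRING, number=1)


    # proto-plus wrapper: field metadata lives under .meta.fields
    print(hasattr(Widget, "DESCRIPTOR"), list(Widget.meta.fields))
    # vanilla protobuf (*_pb2) message: field metadata lives under DESCRIPTOR.fields
    print(hasattr(duration_pb2.Duration, "DESCRIPTOR"),
          [f.name for f in duration_pb2.Duration.DESCRIPTOR.fields])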
@@ -1902,8 +1981,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2000,8 +2080,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertSslPolicyRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2093,27 +2174,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2154,8 +2214,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2232,6 +2293,73 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2265,8 +2393,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2341,8 +2470,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertSslPolicyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2434,27 +2564,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
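Note: in the *_rest_bad_request tests the full resource dict is removed from request_init rather than pruned. Those tests mock the transport to answer with an HTTP 400 before the payload is ever inspected, so only the fields needed to satisfy URL transcoding (here just project) are kept, and the new pruning machinery only needs to appear in the success-path tests. A minimal sketch of that error-path shape using requests, pytest and unittest.mock; insert_ssl_policy is an invented helper standing in for the generated REST client, not part of google-cloud-compute.

    from unittest import mock

    import pytest
    import requests


    def insert_ssl_policy(session, project, body=None):
        # Invented stand-in for the generated REST transport method.
        resp = session.post(
            f"https://example.invalid/projects/{project}/sslPolicies", json=body
        )
        if resp.status_code >= 400:
            raise RuntimeError(f"request failed: {resp.status_code}")
        return resp.json()


    def test_insert_ssl_policy_bad_request():
        # Only the fields needed for the URL are supplied; no resource body,
        # because the mocked transport rejects the call with a 400 up front.
        response = requests.Response()
        response.status_code = 400
        with mock.patch.object(requests.Session, "post", return_value=response):
            with pytest.raises(RuntimeError):
                insert_ssl_policy(requests.Session(), "sample1")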
@@ -2495,8 +2604,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2567,8 +2677,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2654,8 +2765,9 @@ def test_list_rest_required_fields(request_type=compute.ListSslPoliciesRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2789,8 +2901,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2910,10 +3023,9 @@ def test_list_available_features_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2998,10 +3110,11 @@ def test_list_available_features_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3140,10 +3253,9 @@ def test_list_available_features_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.SslPoliciesListAvailableFeaturesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3217,6 +3329,73 @@ def test_patch_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3250,8 +3429,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3352,8 +3532,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchSslPolicyRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3446,27 +3627,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "ssl_policy": "sample2"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3508,8 +3668,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3587,6 +3748,73 @@ def test_patch_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSslPolicyRequest.meta.fields["ssl_policy_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["ssl_policy_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["ssl_policy_resource"][field])): + del request_init["ssl_policy_resource"][field][i][subfield] + else: + del request_init["ssl_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
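Note: on either side of each of these pruning blocks, the tests fake the REST layer the same way: build a real requests.Response, set status_code to 200, plant the JSON-serialized protobuf in _content, and hand it back as the patched session's return value so the transport's response parsing runs against realistic bytes. A condensed sketch of that scaffold using only requests, unittest.mock and a well-known protobuf type; get_settings and the URL are invented for the example.

    from unittest import mock

    import requests
    from google.protobuf import json_format, struct_pb2


    def get_settings(session):
        # Invented helper standing in for a generated REST call.
        resp = session.get("https://example.invalid/settings")
        return resp.json()


    # Build the fake response the way the generated tests do: serialize a protobuf
    # message to JSON and plant it in Response._content.
    return_value = struct_pb2.Struct()
    return_value["name"] = "sample1"
    response_value = requests.Response()
    response_value.status_code = 200
    response_value._content = json_format.MessageToJson(return_value).encode("UTF-8")

    with mock.patch.object(requests.Session, "get", return_value=response_value):
        assert get_settings(requests.Session()) == {"name": "sample1"}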
@@ -3620,8 +3848,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3700,8 +3929,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchSslPolicyReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3794,27 +4024,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "ssl_policy": "sample2"} - request_init["ssl_policy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "custom_features": ["custom_features_value1", "custom_features_value2"], - "description": "description_value", - "enabled_features": ["enabled_features_value1", "enabled_features_value2"], - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "min_tls_version": "min_tls_version_value", - "name": "name_value", - "profile": "profile_value", - "region": "region_value", - "self_link": "self_link_value", - "warnings": [ - { - "code": "code_value", - "data": [{"key": "key_value", "value": "value_value"}], - "message": "message_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3856,8 +4065,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_subnetworks.py b/tests/unit/gapic/compute_v1/test_subnetworks.py index 2aadc77d..77f89c7d 100644 --- a/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -963,8 +966,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteSubnetworkReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1207,8 +1212,9 @@ def 
test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1296,8 +1302,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1381,8 +1388,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1520,8 +1528,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1579,6 +1588,88 @@ def test_expand_ip_cidr_range_rest(request_type): request_init["subnetworks_expand_ip_cidr_range_request_resource"] = { "ip_cidr_range": "ip_cidr_range_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ExpandIpCidrRangeSubnetworkRequest.meta.fields[ + "subnetworks_expand_ip_cidr_range_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ][field] + ), + ): + del request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ][field][i][subfield] + else: + del request_init["subnetworks_expand_ip_cidr_range_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
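Note: every one of these pruning blocks runs immediately before request = request_type(**request_init), which is the line that would otherwise break: both proto-plus wrappers and the underlying *_pb2 messages reject field names they do not know, so a sample dict generated against a newer proto surface cannot be handed to an older runtime unchanged. A tiny demonstration of that failure mode with invented proto-plus messages (assumes the proto-plus package; the not_yet_released key plays the part of a subfield the installed runtime has not learned about yet, and the real request types live in google.cloud.compute_v1).

    import proto


    class LogConfig(proto.Message):
        # Invented nested message.
        enable = proto.Field(proto.BOOL, number=1)


    class Subnetwork(proto.Message):
        # Invented top-level message, for illustration only.
        name = proto.Field(proto.STRING, number=1)
        log_config = proto.Field(LogConfig, number=2)


    print(Subnetwork(name="n", log_config={"enable": True}))  # constructs fine

    try:
        # A subfield the installed runtime does not know about makes construction
        # fail, which is exactly what the pruning step above guards against.
        Subnetwork(name="n", log_config={"enable": True, "not_yet_released": True})
    except Exception as exc:  # typically a ValueError / "no field" error from protobuf
        print(type(exc).__name__, exc)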
@@ -1612,8 +1703,9 @@ def test_expand_ip_cidr_range_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1720,8 +1812,9 @@ def test_expand_ip_cidr_range_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1817,9 +1910,6 @@ def test_expand_ip_cidr_range_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetworks_expand_ip_cidr_range_request_resource"] = { - "ip_cidr_range": "ip_cidr_range_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1866,8 +1956,9 @@ def test_expand_ip_cidr_range_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1928,6 +2019,88 @@ def test_expand_ip_cidr_range_unary_rest(request_type): request_init["subnetworks_expand_ip_cidr_range_request_resource"] = { "ip_cidr_range": "ip_cidr_range_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ExpandIpCidrRangeSubnetworkRequest.meta.fields[ + "subnetworks_expand_ip_cidr_range_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ][field] + ), + ): + del request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ][field][i][subfield] + else: + del request_init["subnetworks_expand_ip_cidr_range_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1961,8 +2134,9 @@ def test_expand_ip_cidr_range_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2047,8 +2221,9 @@ def test_expand_ip_cidr_range_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2144,9 +2319,6 @@ def test_expand_ip_cidr_range_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetworks_expand_ip_cidr_range_request_resource"] = { - "ip_cidr_range": "ip_cidr_range_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2193,8 +2365,9 @@ def test_expand_ip_cidr_range_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2285,8 +2458,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Subnetwork.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Subnetwork.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2388,8 +2562,9 @@ def test_get_rest_required_fields(request_type=compute.GetSubnetworkRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Subnetwork.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Subnetwork.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2525,8 +2700,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Subnetwork.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Subnetwork.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2595,8 +2771,9 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2683,8 +2860,9 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2822,8 +3000,9 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2913,6 +3092,73 @@ def test_insert_rest(request_type): "stack_type": "stack_type_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSubnetworkRequest.meta.fields["subnetwork_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["subnetwork_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["subnetwork_resource"][field])): + del request_init["subnetwork_resource"][field][i][subfield] + else: + del request_init["subnetwork_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2946,8 +3192,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3048,8 +3295,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertSubnetworkReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3144,41 +3392,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["subnetwork_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_flow_logs": True, - "external_ipv6_prefix": "external_ipv6_prefix_value", - "fingerprint": "fingerprint_value", - "gateway_address": "gateway_address_value", - "id": 205, - "internal_ipv6_prefix": "internal_ipv6_prefix_value", - "ip_cidr_range": "ip_cidr_range_value", - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_cidr_range": "ipv6_cidr_range_value", - "kind": "kind_value", - "log_config": { - "aggregation_interval": "aggregation_interval_value", - "enable": True, - "filter_expr": "filter_expr_value", - "flow_sampling": 0.1394, - "metadata": "metadata_value", - "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], - }, - "name": "name_value", - "network": "network_value", - "private_ip_google_access": True, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "purpose": "purpose_value", - "region": "region_value", - "role": "role_value", - "secondary_ip_ranges": [ - {"ip_cidr_range": "ip_cidr_range_value", "range_name": "range_name_value"} - ], - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3220,8 +3433,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3313,6 +3527,73 @@ def test_insert_unary_rest(request_type): "stack_type": "stack_type_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertSubnetworkRequest.meta.fields["subnetwork_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["subnetwork_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["subnetwork_resource"][field])): + del request_init["subnetwork_resource"][field][i][subfield] + else: + del request_init["subnetwork_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
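Editor's note: the `get_message_fields` helper repeated in these hunks tells proto-plus wrappers apart from raw `*_pb2` messages by checking for a `DESCRIPTOR` attribute: proto-plus classes keep their field metadata under `.meta.fields`, while raw protobuf classes expose `.DESCRIPTOR.fields`. A tiny illustration of the same heuristic follows; the printed results assume the heuristic holds for these particular classes:

    # Illustrative sketch: the same proto-plus vs. protobuf check the generated
    # helper relies on, applied to one wrapper class and one raw *_pb2 class.
    from google.protobuf import struct_pb2
    from google.cloud.compute_v1.types import compute


    def is_proto_plus(message_cls) -> bool:
        # The generated tests treat the absence of DESCRIPTOR as "proto-plus".
        return not hasattr(message_cls, "DESCRIPTOR")


    print(is_proto_plus(compute.Subnetwork))  # expected: True (proto-plus wrapper)
    print(is_proto_plus(struct_pb2.Struct))   # expected: False (raw *_pb2 class)

Either way, the helper hands back a list of field descriptors, so the downstream (field, subfield) pairing works identically for both runtimes.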
@@ -3346,8 +3627,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3428,8 +3710,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3524,41 +3807,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["subnetwork_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_flow_logs": True, - "external_ipv6_prefix": "external_ipv6_prefix_value", - "fingerprint": "fingerprint_value", - "gateway_address": "gateway_address_value", - "id": 205, - "internal_ipv6_prefix": "internal_ipv6_prefix_value", - "ip_cidr_range": "ip_cidr_range_value", - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_cidr_range": "ipv6_cidr_range_value", - "kind": "kind_value", - "log_config": { - "aggregation_interval": "aggregation_interval_value", - "enable": True, - "filter_expr": "filter_expr_value", - "flow_sampling": 0.1394, - "metadata": "metadata_value", - "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], - }, - "name": "name_value", - "network": "network_value", - "private_ip_google_access": True, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "purpose": "purpose_value", - "region": "region_value", - "role": "role_value", - "secondary_ip_ranges": [ - {"ip_cidr_range": "ip_cidr_range_value", "range_name": "range_name_value"} - ], - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3600,8 +3848,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3673,8 +3922,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3764,8 +4014,9 @@ def test_list_rest_required_fields(request_type=compute.ListSubnetworksRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3905,8 +4156,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.SubnetworkList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4030,8 +4282,9 @@ def test_list_usable_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4119,8 +4372,9 @@ def test_list_usable_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4256,8 +4510,9 @@ def test_list_usable_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4402,6 +4657,73 @@ def test_patch_rest(request_type): "stack_type": "stack_type_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSubnetworkRequest.meta.fields["subnetwork_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["subnetwork_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["subnetwork_resource"][field])): + del request_init["subnetwork_resource"][field][i][subfield] + else: + del request_init["subnetwork_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4435,8 +4757,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4546,8 +4869,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchSubnetworkRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4646,41 +4970,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetwork_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_flow_logs": True, - "external_ipv6_prefix": "external_ipv6_prefix_value", - "fingerprint": "fingerprint_value", - "gateway_address": "gateway_address_value", - "id": 205, - "internal_ipv6_prefix": "internal_ipv6_prefix_value", - "ip_cidr_range": "ip_cidr_range_value", - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_cidr_range": "ipv6_cidr_range_value", - "kind": "kind_value", - "log_config": { - "aggregation_interval": "aggregation_interval_value", - "enable": True, - "filter_expr": "filter_expr_value", - "flow_sampling": 0.1394, - "metadata": "metadata_value", - "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], - }, - "name": "name_value", - "network": "network_value", - "private_ip_google_access": True, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "purpose": "purpose_value", - "region": "region_value", - "role": "role_value", - "secondary_ip_ranges": [ - {"ip_cidr_range": "ip_cidr_range_value", "range_name": "range_name_value"} - ], - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4727,8 +5016,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4821,6 +5111,73 @@ def test_patch_unary_rest(request_type): "stack_type": "stack_type_value", "state": "state_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSubnetworkRequest.meta.fields["subnetwork_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["subnetwork_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["subnetwork_resource"][field])): + del request_init["subnetwork_resource"][field][i][subfield] + else: + del request_init["subnetwork_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
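Editor's note: aside from the request pruning, the recurring one-line change in every hunk here is dropping the intermediate `pb_return_value` name: the mocked return value is rebound to its underlying protobuf message before being JSON-encoded, which is what `json_format.MessageToJson` expects. A minimal sketch of that conversion on its own, using `compute.Operation` as in the tests (the field value is illustrative):

    # Illustrative sketch of the conversion performed before serializing the
    # mocked response: proto-plus wrapper -> underlying protobuf -> JSON.
    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute

    return_value = compute.Operation(name="operation-1")

    # Convert return value to protobuf type, mirroring the generated tests.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)
    print(json_return_value)  # prints a JSON object containing "name": "operation-1"

The behavior is unchanged; rebinding `return_value` simply avoids carrying a second variable through the test body.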
@@ -4854,8 +5211,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4943,8 +5301,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchSubnetworkRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5043,41 +5402,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetwork_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "enable_flow_logs": True, - "external_ipv6_prefix": "external_ipv6_prefix_value", - "fingerprint": "fingerprint_value", - "gateway_address": "gateway_address_value", - "id": 205, - "internal_ipv6_prefix": "internal_ipv6_prefix_value", - "ip_cidr_range": "ip_cidr_range_value", - "ipv6_access_type": "ipv6_access_type_value", - "ipv6_cidr_range": "ipv6_cidr_range_value", - "kind": "kind_value", - "log_config": { - "aggregation_interval": "aggregation_interval_value", - "enable": True, - "filter_expr": "filter_expr_value", - "flow_sampling": 0.1394, - "metadata": "metadata_value", - "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], - }, - "name": "name_value", - "network": "network_value", - "private_ip_google_access": True, - "private_ipv6_google_access": "private_ipv6_google_access_value", - "purpose": "purpose_value", - "region": "region_value", - "role": "role_value", - "secondary_ip_ranges": [ - {"ip_cidr_range": "ip_cidr_range_value", "range_name": "range_name_value"} - ], - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "state": "state_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5124,8 +5448,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5260,6 +5585,81 @@ def test_set_iam_policy_rest(request_type): "version": 774, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicySubnetworkRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5274,8 +5674,9 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5361,8 +5762,9 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5458,83 +5860,6 @@ def test_set_iam_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5581,8 +5906,9 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Policy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5643,6 +5969,88 @@ def test_set_private_ip_google_access_rest(request_type): request_init["subnetworks_set_private_ip_google_access_request_resource"] = { "private_ip_google_access": True } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetPrivateIpGoogleAccessSubnetworkRequest.meta.fields[ + "subnetworks_set_private_ip_google_access_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field] + ), + ): + del request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field][i][subfield] + 
else: + del request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5676,8 +6084,9 @@ def test_set_private_ip_google_access_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5784,8 +6193,9 @@ def test_set_private_ip_google_access_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5882,9 +6292,6 @@ def test_set_private_ip_google_access_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetworks_set_private_ip_google_access_request_resource"] = { - "private_ip_google_access": True - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5931,8 +6338,9 @@ def test_set_private_ip_google_access_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5993,6 +6401,88 @@ def test_set_private_ip_google_access_unary_rest(request_type): request_init["subnetworks_set_private_ip_google_access_request_resource"] = { "private_ip_google_access": True } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetPrivateIpGoogleAccessSubnetworkRequest.meta.fields[ + "subnetworks_set_private_ip_google_access_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field] + ), + ): + del request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field][i][subfield] + else: + del request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
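Editor's note: for context on how these mocked responses get consumed, every REST test in the file follows the same pattern of building a `requests.Response`, attaching the JSON body via `_content`, and patching the transport session's `request` method. A simplified sketch of that pattern outside the generated file, adapted from these hunks (client construction and field values are illustrative):

    # Simplified sketch of the HTTP-layer mocking used throughout these tests.
    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.protobuf import json_format
    from requests import Response

    from google.cloud.compute_v1.services.subnetworks import SubnetworksClient
    from google.cloud.compute_v1.types import compute

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )

    # Designate an appropriate value as the mocked return value.
    return_value = compute.Operation(name="operation-1")

    # Wrap the value into a proper Response obj.
    response_value = Response()
    response_value.status_code = 200
    # Convert return value to protobuf type before JSON-encoding it.
    json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
    response_value._content = json_return_value.encode("UTF-8")

    with mock.patch.object(type(client.transport._session), "request") as req:
        req.return_value = response_value
        response = client.insert_unary(
            project="sample1",
            region="sample2",
            subnetwork_resource=compute.Subnetwork(name="name_value"),
        )

    assert response.name == "operation-1"

Because the session is patched at the type level, no network traffic is attempted; the client simply parses the canned JSON back into the expected response type.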
@@ -6026,8 +6516,9 @@ def test_set_private_ip_google_access_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6112,8 +6603,9 @@ def test_set_private_ip_google_access_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6210,9 +6702,6 @@ def test_set_private_ip_google_access_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} - request_init["subnetworks_set_private_ip_google_access_request_resource"] = { - "private_ip_google_access": True - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6259,8 +6748,9 @@ def test_set_private_ip_google_access_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6323,6 +6813,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsSubnetworkRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6335,8 +6900,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6420,8 +6986,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6519,9 +7086,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6568,8 +7132,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py index 03d8a52a..c07c233a 100644 --- a/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -626,8 +626,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -727,8 +728,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetGrpcProxyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -860,8 +862,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper 
Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -948,8 +951,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1029,8 +1033,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1162,8 +1167,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1238,8 +1244,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1325,8 +1332,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetGrpcProxyRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1460,8 +1468,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1527,6 +1536,79 @@ def test_insert_rest(request_type): "url_map": "url_map_value", "validate_for_proxyless": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetGrpcProxyRequest.meta.fields[ + "target_grpc_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_grpc_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_grpc_proxy_resource"][field]) + ): + del request_init["target_grpc_proxy_resource"][field][i][subfield] + else: + del request_init["target_grpc_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
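Editor's note: the `runtime_nested_fields` comprehension added in this hunk flattens the request message one level deep into (field, subfield) pairs, so the pruning loop can test membership with a simple tuple lookup. A small illustration of the shape of that list, using a hypothetical nested layout rather than the real descriptors:

    # Illustrative only: what the (field, subfield) pairs look like, built from a
    # hypothetical mapping of message-typed fields to their nested field names.
    nested_layout = {
        "nested_message": ["child_a", "child_b"],   # message-typed field
        "repeated_nested": ["child_c"],             # repeated message-typed field
        "scalar_field": [],                         # scalar field: nothing nested
    }

    runtime_nested_fields = [
        (field, subfield)
        for field, subfields in nested_layout.items()
        for subfield in subfields
    ]

    assert ("nested_message", "child_a") in runtime_nested_fields
    assert ("nested_message", "not_in_runtime") not in runtime_nested_fields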
@@ -1560,8 +1642,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1658,8 +1741,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetGrpcProxyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1753,18 +1837,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_grpc_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "url_map": "url_map_value", - "validate_for_proxyless": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1805,8 +1877,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1874,6 +1947,79 @@ def test_insert_unary_rest(request_type): "url_map": "url_map_value", "validate_for_proxyless": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetGrpcProxyRequest.meta.fields[ + "target_grpc_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_grpc_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_grpc_proxy_resource"][field]) + ): + del request_init["target_grpc_proxy_resource"][field][i][subfield] + else: + del request_init["target_grpc_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1907,8 +2053,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1985,8 +2132,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2080,18 +2228,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_grpc_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "url_map": "url_map_value", - "validate_for_proxyless": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2132,8 +2268,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2204,8 +2341,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2291,8 +2429,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetGrpcProxiesReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2428,8 +2567,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetGrpcProxyList.pb(return_value) 
- json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2549,6 +2689,79 @@ def test_patch_rest(request_type): "url_map": "url_map_value", "validate_for_proxyless": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetGrpcProxyRequest.meta.fields[ + "target_grpc_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_grpc_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_grpc_proxy_resource"][field]) + ): + del request_init["target_grpc_proxy_resource"][field][i][subfield] + else: + del request_init["target_grpc_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
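The `get_message_fields` helper added above has to enumerate message fields for both proto-plus wrappers (everything in `compute`) and plain `*_pb2` messages, and it tells them apart by whether the class exposes a `DESCRIPTOR`. A minimal sketch of that distinction, assuming `google-cloud-compute` is installed; the printed field names are only examples:

    from google.cloud.compute_v1.types import compute

    # Proto-plus classes keep their field metadata on `meta.fields`; the wrapped
    # protobuf message (reachable via .pb()) exposes the usual DESCRIPTOR instead.
    plus_cls = compute.TargetGrpcProxy
    print(hasattr(plus_cls, "DESCRIPTOR"))    # False for proto-plus - the check get_message_fields relies on
    print(sorted(plus_cls.meta.fields)[:3])   # proto-plus view of the field names, e.g. includes 'name'

    pb2_msg = compute.TargetGrpcProxy.pb(compute.TargetGrpcProxy())
    print([f.name for f in pb2_msg.DESCRIPTOR.fields][:3])  # same names via protobuf FieldDescriptors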
@@ -2582,8 +2795,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2684,8 +2898,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchTargetGrpcProxyReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2780,18 +2995,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} - request_init["target_grpc_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "url_map": "url_map_value", - "validate_for_proxyless": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2833,8 +3036,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2903,6 +3107,79 @@ def test_patch_unary_rest(request_type): "url_map": "url_map_value", "validate_for_proxyless": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetGrpcProxyRequest.meta.fields[ + "target_grpc_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_grpc_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_grpc_proxy_resource"][field]) + ): + del request_init["target_grpc_proxy_resource"][field][i][subfield] + else: + del request_init["target_grpc_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
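Throughout these files the response-mock hunks also drop the `pb_return_value` temporary: `json_format.MessageToJson` serializes a raw protobuf message, and `compute.Operation.pb(return_value)` already unwraps the proto-plus object into exactly that, so the converted value simply reuses the `return_value` name. A minimal sketch of the pattern, assuming `google-cloud-compute` and `requests` are available; the field value is illustrative:

    from google.cloud.compute_v1.types import compute
    from google.protobuf import json_format
    from requests import Response

    # Proto-plus return value the mocked endpoint should yield.
    return_value = compute.Operation(name="operation-123")

    # Convert return value to protobuf type, then serialize it to JSON.
    return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)

    # Stuff the JSON into a fake requests.Response, as the tests do.
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")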
@@ -2936,8 +3213,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3018,8 +3296,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3114,18 +3393,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} - request_init["target_grpc_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "self_link": "self_link_value", - "self_link_with_id": "self_link_with_id_value", - "url_map": "url_map_value", - "validate_for_proxyless": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3167,8 +3434,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/tests/unit/gapic/compute_v1/test_target_http_proxies.py index c2238d1d..6993d96a 100644 --- a/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,8 +840,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -994,8 +997,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1095,8 +1099,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetHttpProxyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1228,8 +1233,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1316,8 +1322,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1397,8 +1404,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,8 +1538,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1607,8 +1616,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1695,8 +1705,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetHttpProxyRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1830,8 +1841,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1898,6 +1910,79 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1931,8 +2016,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2029,8 +2115,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetHttpProxyR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2124,19 +2211,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2177,8 +2251,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2247,6 +2322,79 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
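In short, the guarded block repeated above protects `request_type(**request_init)` from version skew: if the installed proto-plus/protobuf runtime is older than the version the sample dict was generated against, a nested sub-field in the dict may not exist at runtime and construction would raise, so any such sub-field is deleted first. A condensed, purely hypothetical illustration of the same idea (`http_filters` and `brand_new_field` are made-up names, not fields of these compute messages):

    # Sample body as a generator might emit it; "brand_new_field" stands in for a
    # sub-field the installed runtime does not define yet.
    resource = {
        "name": "name_value",
        "http_filters": [{"name": "filter_1", "brand_new_field": "x"}],
    }

    # (field, subfield) pairs that do exist at runtime; in the tests this set is
    # derived from the request message via get_message_fields().
    runtime_nested_fields = {("http_filters", "name")}

    for field, value in resource.items():
        items = value if isinstance(value, list) else [value]
        for item in items:
            if not isinstance(item, dict):
                continue  # scalar fields have no sub-fields to prune
            for subfield in list(item):
                if (field, subfield) not in runtime_nested_fields:
                    del item[subfield]

    assert resource == {"name": "name_value", "http_filters": [{"name": "filter_1"}]}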
@@ -2280,8 +2428,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2358,8 +2507,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2453,19 +2603,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2506,8 +2643,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2578,8 +2716,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2665,8 +2804,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetHttpProxiesReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2802,8 +2942,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.TargetHttpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2924,6 +3065,79 @@ def test_patch_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2957,8 +3171,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3059,8 +3274,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpProxyReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3155,19 +3371,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3209,8 +3412,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3280,6 +3484,79 @@ def test_patch_unary_rest(request_type): "self_link": "self_link_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetHttpProxyRequest.meta.fields[ + "target_http_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_http_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_http_proxy_resource"][field]) + ): + del request_init["target_http_proxy_resource"][field][i][subfield] + else: + del request_init["target_http_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
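The `*_rest_bad_request` hunks above now build `request_init` from the routing fields alone (`project`, `target_http_proxy`): these tests only assert that a mocked HTTP 400 surfaces as a client error, so the resource dict adds nothing, and omitting it sidesteps the same version-skew pruning. The request message constructs happily without it; a minimal sketch, assuming `google-cloud-compute` is installed:

    from google.cloud.compute_v1.types import compute

    # Only the fields needed for URL transcoding; the resource payload stays unset.
    request = compute.PatchTargetHttpProxyRequest(
        project="sample1",
        target_http_proxy="sample2",
    )
    print(request.target_http_proxy_resource)  # defaults to an empty TargetHttpProxy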
@@ -3313,8 +3590,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3395,8 +3673,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3491,19 +3770,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} - request_init["target_http_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "region": "region_value", - "self_link": "self_link_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3545,8 +3811,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3604,6 +3871,79 @@ def test_set_url_map_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapTargetHttpProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3637,8 +3977,9 @@ def test_set_url_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3741,8 +4082,9 @@ def test_set_url_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3837,7 +4179,6 @@ def test_set_url_map_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3877,8 +4218,9 @@ def test_set_url_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3934,6 +4276,79 @@ def test_set_url_map_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapTargetHttpProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
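For the `set_url_map` variants the newly added block is effectively inert: as the sample dict above shows, `url_map_reference_resource` is a `UrlMapReference` carrying just a scalar `url_map` field, so there are no nested message sub-fields and `runtime_nested_fields` comes out empty. A short sketch of why, under the same assumptions as the earlier snippets:

    from google.cloud.compute_v1.types import compute

    test_field = compute.SetUrlMapTargetHttpProxyRequest.meta.fields[
        "url_map_reference_resource"
    ]
    print(sorted(test_field.message.meta.fields))  # just 'url_map', a plain string field

    # get_message_fields() on a string field returns [], so the nested-field
    # comprehension yields an empty list and nothing is ever pruned.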
@@ -3967,8 +4382,9 @@ def test_set_url_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4049,8 +4465,9 @@ def test_set_url_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4145,7 +4562,6 @@ def test_set_url_map_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_http_proxy": "sample2"} - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4185,8 +4601,9 @@ def test_set_url_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/tests/unit/gapic/compute_v1/test_target_https_proxies.py index 2e2ea827..52a6d4a5 100644 --- a/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -839,8 +841,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -995,8 +998,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1098,8 +1102,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1231,8 +1236,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1319,8 +1325,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1400,8 +1407,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1533,8 +1541,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1616,8 +1625,9 
@@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1710,8 +1720,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetHttpsProxyReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1845,8 +1856,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1919,6 +1931,79 @@ def test_insert_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
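Aside (not part of the diff): the boilerplate above turns on a single check, whether the nested message type is a proto-plus wrapper or a raw protobuf class. A minimal standalone sketch of that check, assuming the same compute types module these generated tests import:

from google.cloud.compute_v1.types import compute  # assumed import path, matching these generated tests

# Field descriptor of the message-typed request field, exactly as read in the test above.
field = compute.InsertTargetHttpsProxyRequest.meta.fields["target_https_proxy_resource"]

if not hasattr(field.message, "DESCRIPTOR"):
    # proto-plus wrapper: nested field metadata lives on .meta.fields
    nested_names = [f.name for f in field.message.meta.fields.values()]
else:
    # raw *_pb2 protobuf class: nested field metadata lives on the descriptor
    nested_names = [f.name for f in field.message.DESCRIPTOR.fields]

print(sorted(nested_names))  # TargetHttpsProxy field names, e.g. "ssl_certificates", "url_map"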
@@ -1952,8 +2037,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2052,8 +2138,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2147,25 +2234,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2206,8 +2274,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2282,6 +2351,79 @@ def test_insert_unary_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
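Aside (not part of the diff): the pruning loop above, collapsed to its dict-valued branch and run on toy data; the dict contents and the runtime_nested_fields entries here are illustrative only:

# Toy stand-ins; in the tests these come from request_init and the message metadata.
resource = {"outer": {"known": 1, "stale": 2}}
runtime_nested_fields = [("outer", "known")]  # pretend "stale" is absent at runtime

for field, value in resource.items():
    if isinstance(value, dict):
        for subfield in list(value.keys()):
            if (field, subfield) not in runtime_nested_fields:
                del value[subfield]

print(resource)  # {'outer': {'known': 1}}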
@@ -2315,8 +2457,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2393,8 +2536,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2488,25 +2632,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
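Aside (not part of the diff): the rename applied in the hunks above changes only a local variable; the conversion itself is unchanged. A minimal sketch of that conversion, assuming the same compute types module:

from google.cloud.compute_v1.types import compute  # assumed import path, matching these generated tests
from google.protobuf import json_format

return_value = compute.Operation(name="operation-123")  # illustrative field value
# Convert return value to protobuf type, then serialize it for the fake response body.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
print(json_return_value)  # JSON text containing "name": "operation-123"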
@@ -2547,8 +2672,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2619,8 +2745,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2706,8 +2833,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetHttpsProxiesRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2843,8 +2971,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetHttpsProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2971,6 +3100,79 @@ def test_patch_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3004,8 +3206,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3106,8 +3309,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpsProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3202,25 +3406,6 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3262,8 +3447,9 @@ def test_patch_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3339,6 +3525,79 @@ def test_patch_unary_rest(request_type): "ssl_policy": "ssl_policy_value", "url_map": "url_map_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchTargetHttpsProxyRequest.meta.fields[ + "target_https_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_https_proxy_resource"][field]) + ): + del request_init["target_https_proxy_resource"][field][i][subfield] + else: + del request_init["target_https_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3372,8 +3631,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3454,8 +3714,9 @@ def test_patch_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3550,25 +3811,6 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxy_resource"] = { - "authorization_policy": "authorization_policy_value", - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "http_keep_alive_timeout_sec": 2868, - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "quic_override": "quic_override_value", - "region": "region_value", - "self_link": "self_link_value", - "server_tls_policy": "server_tls_policy_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - "url_map": "url_map_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3610,8 +3852,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3671,6 +3914,88 @@ def test_set_certificate_map_rest(request_type): request_init["target_https_proxies_set_certificate_map_request_resource"] = { "certificate_map": "certificate_map_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCertificateMapTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_certificate_map_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
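Aside (not part of the diff): the fake REST response used throughout these hunks is a bare requests.Response with its private _content attribute set by hand, so no HTTP traffic is needed. A minimal sketch; the JSON body here is illustrative only:

from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = b'{"kind": "compute#operation"}'  # illustrative body

print(response_value.status_code)              # 200
print(response_value.content.decode("UTF-8"))  # {"kind": "compute#operation"}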
@@ -3704,8 +4029,9 @@ def test_set_certificate_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3808,8 +4134,9 @@ def test_set_certificate_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3905,9 +4232,6 @@ def test_set_certificate_map_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_certificate_map_request_resource"] = { - "certificate_map": "certificate_map_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3949,8 +4273,9 @@ def test_set_certificate_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4010,6 +4335,88 @@ def test_set_certificate_map_unary_rest(request_type): request_init["target_https_proxies_set_certificate_map_request_resource"] = { "certificate_map": "certificate_map_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCertificateMapTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_certificate_map_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_certificate_map_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4043,8 +4450,9 @@ def test_set_certificate_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4125,8 +4533,9 @@ def test_set_certificate_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4222,9 +4631,6 @@ def test_set_certificate_map_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_certificate_map_request_resource"] = { - "certificate_map": "certificate_map_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4266,8 +4672,9 @@ def test_set_certificate_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4327,6 +4734,88 @@ def test_set_quic_override_rest(request_type): request_init["target_https_proxies_set_quic_override_request_resource"] = { "quic_override": "quic_override_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetQuicOverrideTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_quic_override_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_quic_override_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4360,8 +4849,9 @@ def test_set_quic_override_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4464,8 +4954,9 @@ def test_set_quic_override_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4560,9 +5051,6 @@ def test_set_quic_override_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_quic_override_request_resource"] = { - "quic_override": "quic_override_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4604,8 +5092,9 @@ def test_set_quic_override_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4665,6 +5154,88 @@ def test_set_quic_override_unary_rest(request_type): request_init["target_https_proxies_set_quic_override_request_resource"] = { "quic_override": "quic_override_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetQuicOverrideTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_quic_override_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_quic_override_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_quic_override_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
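Aside (not part of the diff): the is_repeated branch of the same pruning logic, collapsed into one pass and run on toy data; names and values here are illustrative only:

# Toy stand-ins; in the tests the outer dict is the *_resource entry of request_init.
resource = {"items": [{"keep": 1, "drop": 2}, {"keep": 3, "drop": 4}]}
runtime_nested_fields = [("items", "keep")]  # pretend "drop" is absent at runtime

for field, value in resource.items():
    if isinstance(value, list) and value:
        for element in value:
            for subfield in list(element.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del element[subfield]

print(resource)  # {'items': [{'keep': 1}, {'keep': 3}]}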
@@ -4698,8 +5269,9 @@ def test_set_quic_override_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4780,8 +5352,9 @@ def test_set_quic_override_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4876,9 +5449,6 @@ def test_set_quic_override_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_quic_override_request_resource"] = { - "quic_override": "quic_override_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4920,8 +5490,9 @@ def test_set_quic_override_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4981,6 +5552,88 @@ def test_set_ssl_certificates_rest(request_type): request_init["target_https_proxies_set_ssl_certificates_request_resource"] = { "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5014,8 +5667,9 @@ def test_set_ssl_certificates_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5118,8 +5772,9 @@ def test_set_ssl_certificates_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5215,9 +5870,6 @@ def test_set_ssl_certificates_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_ssl_certificates_request_resource"] = { - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5259,8 +5911,9 @@ def test_set_ssl_certificates_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5320,6 +5973,88 @@ def test_set_ssl_certificates_unary_rest(request_type): request_init["target_https_proxies_set_ssl_certificates_request_resource"] = { "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesTargetHttpsProxyRequest.meta.fields[ + "target_https_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5353,8 +6088,9 @@ def test_set_ssl_certificates_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5435,8 +6171,9 @@ def test_set_ssl_certificates_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5532,9 +6269,6 @@ def test_set_ssl_certificates_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["target_https_proxies_set_ssl_certificates_request_resource"] = { - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5576,8 +6310,9 @@ def test_set_ssl_certificates_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5635,6 +6370,81 @@ def test_set_ssl_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslPolicyTargetHttpsProxyRequest.meta.fields[ + "ssl_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["ssl_policy_reference_resource"][field]) + ): + del request_init["ssl_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["ssl_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5668,8 +6478,9 @@ def test_set_ssl_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5772,8 +6583,9 @@ def test_set_ssl_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5868,7 +6680,6 @@ def test_set_ssl_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5910,8 +6721,9 @@ def test_set_ssl_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5969,6 +6781,81 @@ def test_set_ssl_policy_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslPolicyTargetHttpsProxyRequest.meta.fields[ + "ssl_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["ssl_policy_reference_resource"][field]) + ): + del request_init["ssl_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["ssl_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
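# --- Editor's note: illustrative sketch only, not part of the generated diff. ---
# The recurring pb_return_value -> return_value rename in these hunks keeps the
# same serialization path: convert the proto-plus wrapper to its underlying
# protobuf message, dump it to JSON, and plant the bytes on a fake
# requests.Response. A sketch, assuming google-cloud-compute, protobuf, and
# requests are installed:
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="name_value")
# proto-plus wrapper -> raw protobuf message -> JSON string
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")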
@@ -6002,8 +6889,9 @@ def test_set_ssl_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6084,8 +6972,9 @@ def test_set_ssl_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6180,7 +7069,6 @@ def test_set_ssl_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6222,8 +7110,9 @@ def test_set_ssl_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6281,6 +7170,79 @@ def test_set_url_map_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapTargetHttpsProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
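# --- Editor's note: illustrative sketch only, not part of the generated diff. ---
# The *_rest_bad_request hunks above drop the body resource from request_init,
# which appears to be because only the routing fields are needed to satisfy
# transcoding, and keeping the body would require the same runtime pruning as
# the happy-path tests. Proto-plus request messages accept keyword construction
# with just those routing fields, e.g.:
from google.cloud.compute_v1.types import compute

request = compute.SetUrlMapTargetHttpsProxyRequest(
    project="sample1", target_https_proxy="sample2"
)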
@@ -6314,8 +7276,9 @@ def test_set_url_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6418,8 +7381,9 @@ def test_set_url_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6514,7 +7478,6 @@ def test_set_url_map_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6554,8 +7517,9 @@ def test_set_url_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6611,6 +7575,79 @@ def test_set_url_map_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetUrlMapTargetHttpsProxyRequest.meta.fields[ + "url_map_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_map_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_map_reference_resource"][field]) + ): + del request_init["url_map_reference_resource"][field][i][subfield] + else: + del request_init["url_map_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
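# --- Editor's note: illustrative sketch only, not part of the generated diff. ---
# get_message_fields() above branches on whether a composite field's message
# type is proto-plus (introspected via .meta.fields) or raw protobuf (via
# .DESCRIPTOR.fields). A sketch of that check against one request type, under
# the same assumptions as the generated helper:
from google.cloud.compute_v1.types import compute

test_field = compute.SetUrlMapTargetHttpsProxyRequest.meta.fields[
    "url_map_reference_resource"
]
message_type = test_field.message
if not hasattr(message_type, "DESCRIPTOR"):  # proto-plus message type
    field_names = [f.name for f in message_type.meta.fields.values()]
else:  # vanilla *_pb2 message type
    field_names = [f.name for f in message_type.DESCRIPTOR.fields]
print(field_names)  # expected to include "url_map" for UrlMapReference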
@@ -6644,8 +7681,9 @@ def test_set_url_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6726,8 +7764,9 @@ def test_set_url_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6822,7 +7861,6 @@ def test_set_url_map_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_https_proxy": "sample2"} - request_init["url_map_reference_resource"] = {"url_map": "url_map_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6862,8 +7900,9 @@ def test_set_url_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_instances.py b/tests/unit/gapic/compute_v1/test_target_instances.py index 14482949..c968b30a 100644 --- a/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/tests/unit/gapic/compute_v1/test_target_instances.py @@ -603,8 +603,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -694,8 +695,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -832,8 +834,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.TargetInstanceAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -992,8 +995,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1097,8 +1101,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1240,8 +1245,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1333,8 +1339,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1418,8 +1425,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1561,8 +1569,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value @@ -1642,8 +1651,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1733,8 +1743,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetInstanceRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1878,8 +1889,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1946,6 +1958,77 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetInstanceRequest.meta.fields[ + "target_instance_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_instance_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_instance_resource"][field])): + del request_init["target_instance_resource"][field][i][subfield] + else: + del request_init["target_instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
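# --- Editor's note: illustrative sketch only, not part of the generated diff. ---
# Sanity check of the faked payload pattern (a sketch of the round trip, not a
# claim about what the REST transport itself does): the JSON produced from the
# protobuf Operation can be parsed back into the proto-plus type, assuming
# proto-plus is installed alongside google-cloud-compute.
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

op = compute.Operation(name="name_value")
json_return_value = json_format.MessageToJson(compute.Operation.pb(op))
round_tripped = compute.Operation.from_json(json_return_value)
assert round_tripped.name == "name_value"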
@@ -1979,8 +2062,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2081,8 +2165,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetInstanceRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2177,18 +2262,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["target_instance_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "instance": "instance_value", - "kind": "kind_value", - "name": "name_value", - "nat_policy": "nat_policy_value", - "network": "network_value", - "self_link": "self_link_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2230,8 +2303,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2300,6 +2374,77 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "zone": "zone_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetInstanceRequest.meta.fields[ + "target_instance_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_instance_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_instance_resource"][field])): + del request_init["target_instance_resource"][field][i][subfield] + else: + del request_init["target_instance_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2333,8 +2478,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2415,8 +2561,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2511,18 +2658,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} - request_init["target_instance_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "instance": "instance_value", - "kind": "kind_value", - "name": "name_value", - "nat_policy": "nat_policy_value", - "network": "network_value", - "self_link": "self_link_value", - "zone": "zone_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2564,8 +2699,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2637,8 +2773,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstanceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2728,8 +2865,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetInstancesReque response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstanceList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2871,8 +3009,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetInstanceList.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_pools.py b/tests/unit/gapic/compute_v1/test_target_pools.py index 3e6d0974..c25ad24e 100644 --- a/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/tests/unit/gapic/compute_v1/test_target_pools.py @@ -567,6 +567,88 @@ def test_add_health_check_rest(request_type): request_init["target_pools_add_health_check_request_resource"] = { "health_checks": [{"health_check": "health_check_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddHealthCheckTargetPoolRequest.meta.fields[ + "target_pools_add_health_check_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_add_health_check_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_add_health_check_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_add_health_check_request_resource"][ + field + ][i][subfield] + else: + 
del request_init["target_pools_add_health_check_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -600,8 +682,9 @@ def test_add_health_check_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -708,8 +791,9 @@ def test_add_health_check_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -805,9 +889,6 @@ def test_add_health_check_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_add_health_check_request_resource"] = { - "health_checks": [{"health_check": "health_check_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -856,8 +937,9 @@ def test_add_health_check_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -920,6 +1002,88 @@ def test_add_health_check_unary_rest(request_type): request_init["target_pools_add_health_check_request_resource"] = { "health_checks": [{"health_check": "health_check_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddHealthCheckTargetPoolRequest.meta.fields[ + "target_pools_add_health_check_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_add_health_check_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_add_health_check_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_add_health_check_request_resource"][ + field + ][i][subfield] + else: + del request_init["target_pools_add_health_check_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
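# --- Editor's note: illustrative sketch only, not part of the generated diff. ---
# The target-pool requests use repeated message fields ("health_checks",
# "instances"), so the pruning loop above takes the is_repeated branch and
# deletes the stale subfield from every element of the list. A toy version with
# made-up subfield names ("stale" is hypothetical):
sample = {"health_checks": [{"health_check": "hc1", "stale": 1},
                            {"health_check": "hc2", "stale": 2}]}
runtime_nested_fields = {("health_checks", "health_check")}

for field, value in list(sample.items()):
    if isinstance(value, list) and value and isinstance(value[0], dict):
        for subfield in list(value[0].keys()):
            if (field, subfield) not in runtime_nested_fields:
                for item in sample[field]:
                    item.pop(subfield, None)

assert sample == {"health_checks": [{"health_check": "hc1"},
                                    {"health_check": "hc2"}]}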
@@ -953,8 +1117,9 @@ def test_add_health_check_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1039,8 +1204,9 @@ def test_add_health_check_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1136,9 +1302,6 @@ def test_add_health_check_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_add_health_check_request_resource"] = { - "health_checks": [{"health_check": "health_check_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1187,8 +1350,9 @@ def test_add_health_check_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1251,6 +1415,88 @@ def test_add_instance_rest(request_type): request_init["target_pools_add_instance_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddInstanceTargetPoolRequest.meta.fields[ + "target_pools_add_instance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_add_instance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_add_instance_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_add_instance_request_resource"][ + field + ][i][subfield] + else: + del request_init["target_pools_add_instance_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1284,8 +1530,9 @@ def test_add_instance_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1392,8 +1639,9 @@ def test_add_instance_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1489,9 +1737,6 @@ def test_add_instance_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_add_instance_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1538,8 +1783,9 @@ def test_add_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1600,6 +1846,88 @@ def test_add_instance_unary_rest(request_type): request_init["target_pools_add_instance_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddInstanceTargetPoolRequest.meta.fields[ + "target_pools_add_instance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_add_instance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_add_instance_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_add_instance_request_resource"][ + field + ][i][subfield] + else: + del request_init["target_pools_add_instance_request_resource"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1633,8 +1961,9 @@ def test_add_instance_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1719,8 +2048,9 @@ def test_add_instance_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1816,9 +2146,6 @@ def test_add_instance_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_add_instance_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1865,8 +2192,9 @@ def test_add_instance_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1940,8 +2268,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2031,8 +2360,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2169,8 +2499,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolAggregatedList.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,8 +2654,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2428,8 +2760,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetPoolReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2567,8 +2900,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2656,8 +2990,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2741,8 +3076,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2880,8 +3216,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2959,8 +3296,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPool.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3052,8 +3390,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetPoolRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3189,8 +3528,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3246,6 +3586,79 @@ def test_get_health_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} request_init["instance_reference_resource"] = {"instance": "instance_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.GetHealthTargetPoolRequest.meta.fields[ + "instance_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_reference_resource"][field]) + ): + del request_init["instance_reference_resource"][field][i][subfield] + else: + del request_init["instance_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
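A hedged sketch of how the get_message_fields helper tells the two descriptor flavors apart, reusing the instance_reference_resource field from the test above. It assumes google-cloud-compute is installed; the test module itself imports these types under the name compute.

from google.cloud import compute_v1 as compute

test_field = compute.GetHealthTargetPoolRequest.meta.fields["instance_reference_resource"]

if hasattr(test_field, "message") and test_field.message:
    if not hasattr(test_field.message, "DESCRIPTOR"):
        # proto-plus message class: nested fields are exposed via `.meta.fields`
        names = [f.name for f in test_field.message.meta.fields.values()]
    else:
        # plain protobuf (`*_pb2`) message class: nested fields live on `.DESCRIPTOR`
        names = [f.name for f in test_field.message.DESCRIPTOR.fields]
else:
    names = []  # scalar or enum field: nothing nested to compare

print(names)  # expected to contain "instance" for compute.InstanceReference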
@@ -3258,8 +3671,9 @@ def test_get_health_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3343,8 +3757,9 @@ def test_get_health_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3442,7 +3857,6 @@ def test_get_health_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["instance_reference_resource"] = {"instance": "instance_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3489,8 +3903,9 @@ def test_get_health_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3562,6 +3977,75 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "session_affinity": "session_affinity_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetPoolRequest.meta.fields["target_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_pool_resource"][field])): + del request_init["target_pool_resource"][field][i][subfield] + else: + del request_init["target_pool_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
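The recurring pb_return_value to return_value rename reduces to the serialization pattern sketched below with illustrative values: json_format expects a raw protobuf message, so the proto-plus wrapper is unwrapped with the .pb() classmethod before the JSON payload is placed on the mocked requests.Response.

from requests import Response
from google.protobuf import json_format
from google.cloud import compute_v1 as compute

return_value = compute.Operation(name="operation-1", status=compute.Operation.Status.DONE)

response_value = Response()
response_value.status_code = 200
# Unwrap the proto-plus wrapper to its underlying protobuf message; the same
# variable name is reused, matching the generated tests.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")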
@@ -3595,8 +4079,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3697,8 +4182,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetPoolReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3793,20 +4279,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_pool_resource"] = { - "backup_pool": "backup_pool_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "failover_ratio": 0.1494, - "health_checks": ["health_checks_value1", "health_checks_value2"], - "id": 205, - "instances": ["instances_value1", "instances_value2"], - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "session_affinity": "session_affinity_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3846,8 +4318,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3916,6 +4389,75 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "session_affinity": "session_affinity_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetPoolRequest.meta.fields["target_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_pool_resource"][field])): + del request_init["target_pool_resource"][field][i][subfield] + else: + del request_init["target_pool_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3949,8 +4491,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4031,8 +4574,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4127,20 +4671,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_pool_resource"] = { - "backup_pool": "backup_pool_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "failover_ratio": 0.1494, - "health_checks": ["health_checks_value1", "health_checks_value2"], - "id": 205, - "instances": ["instances_value1", "instances_value2"], - "kind": "kind_value", - "name": "name_value", - "region": "region_value", - "self_link": "self_link_value", - "session_affinity": "session_affinity_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
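The deletions in the *_rest_bad_request hunks are consistent with the same issue: the hand-written resource body is not required for URL transcoding, and keeping it would leave a hard-coded dict that could fail to construct against an older runtime dependency. A minimal sketch, assuming google-cloud-compute is installed, of what the slimmed-down test now builds:

from google.cloud import compute_v1 as compute

# Path/query fields alone are enough to satisfy transcoding; the mocked HTTP
# layer answers with a 400 before the (absent) body would ever matter.
request_init = {"project": "sample1", "region": "sample2"}
request = compute.InsertTargetPoolRequest(**request_init)
print(request.project, request.region)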
@@ -4180,8 +4710,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4251,8 +4782,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4342,8 +4874,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetPoolsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4483,8 +5016,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetPoolList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4596,6 +5130,88 @@ def test_remove_health_check_rest(request_type): request_init["target_pools_remove_health_check_request_resource"] = { "health_checks": [{"health_check": "health_check_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveHealthCheckTargetPoolRequest.meta.fields[ + "target_pools_remove_health_check_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_remove_health_check_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_pools_remove_health_check_request_resource" + ][field] + ), + ): + del request_init[ + "target_pools_remove_health_check_request_resource" + ][field][i][subfield] + else: + del request_init["target_pools_remove_health_check_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4629,8 +5245,9 @@ def test_remove_health_check_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4737,8 +5354,9 @@ def test_remove_health_check_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4834,9 +5452,6 @@ def test_remove_health_check_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_remove_health_check_request_resource"] = { - "health_checks": [{"health_check": "health_check_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4885,8 +5500,9 @@ def test_remove_health_check_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4949,6 +5565,88 @@ def test_remove_health_check_unary_rest(request_type): request_init["target_pools_remove_health_check_request_resource"] = { "health_checks": [{"health_check": "health_check_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveHealthCheckTargetPoolRequest.meta.fields[ + "target_pools_remove_health_check_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_remove_health_check_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_pools_remove_health_check_request_resource" + ][field] + ), + ): + del request_init[ + "target_pools_remove_health_check_request_resource" + ][field][i][subfield] + else: + del request_init["target_pools_remove_health_check_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4982,8 +5680,9 @@ def test_remove_health_check_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5068,8 +5767,9 @@ def test_remove_health_check_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5165,9 +5865,6 @@ def test_remove_health_check_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_remove_health_check_request_resource"] = { - "health_checks": [{"health_check": "health_check_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5216,8 +5913,9 @@ def test_remove_health_check_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5280,6 +5978,88 @@ def test_remove_instance_rest(request_type): request_init["target_pools_remove_instance_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveInstanceTargetPoolRequest.meta.fields[ + "target_pools_remove_instance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_remove_instance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_remove_instance_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_remove_instance_request_resource"][ + field + ][i][subfield] + else: + del request_init["target_pools_remove_instance_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5313,8 +6093,9 @@ def test_remove_instance_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5421,8 +6202,9 @@ def test_remove_instance_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5518,9 +6300,6 @@ def test_remove_instance_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_remove_instance_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5567,8 +6346,9 @@ def test_remove_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5629,6 +6409,88 @@ def test_remove_instance_unary_rest(request_type): request_init["target_pools_remove_instance_request_resource"] = { "instances": [{"instance": "instance_value"}] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.RemoveInstanceTargetPoolRequest.meta.fields[ + "target_pools_remove_instance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_pools_remove_instance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["target_pools_remove_instance_request_resource"][ + field + ] + ), + ): + del request_init["target_pools_remove_instance_request_resource"][ + field + ][i][subfield] + else: + del request_init["target_pools_remove_instance_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5662,8 +6524,9 @@ def test_remove_instance_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5748,8 +6611,9 @@ def test_remove_instance_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5845,9 +6709,6 @@ def test_remove_instance_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_pools_remove_instance_request_resource"] = { - "instances": [{"instance": "instance_value"}] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5894,8 +6755,9 @@ def test_remove_instance_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5954,6 +6816,79 @@ def test_set_backup_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackupTargetPoolRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5987,8 +6922,9 @@ def test_set_backup_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6100,8 +7036,9 @@ def test_set_backup_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6202,7 +7139,6 @@ def test_set_backup_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6247,8 +7183,9 @@ def test_set_backup_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6305,6 +7242,79 @@ def test_set_backup_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} request_init["target_reference_resource"] = {"target": "target_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackupTargetPoolRequest.meta.fields[ + "target_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_reference_resource"][field]) + ): + del request_init["target_reference_resource"][field][i][subfield] + else: + del request_init["target_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
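For context on why the pruning exists at all (see https://github.com/googleapis/gapic-generator-python/issues/1748 above): proto-plus rejects keys it does not recognize, so a sample dict written against a newer message definition cannot be fed to an older runtime. A hedged illustration follows; the extra key is hypothetical and the exact exception type may vary across proto-plus versions.

from google.cloud import compute_v1 as compute

try:
    compute.TargetReference(target="target_value", field_added_in_newer_proto="x")
except Exception as exc:  # exact exception type depends on the proto-plus version
    # Without the pruning above, test_set_backup_rest would fail the same way
    # whenever the installed dependency lags behind the generator.
    print(f"rejected unknown field: {exc!r}")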
@@ -6338,8 +7348,9 @@ def test_set_backup_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6429,8 +7440,9 @@ def test_set_backup_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6531,7 +7543,6 @@ def test_set_backup_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} - request_init["target_reference_resource"] = {"target": "target_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6576,8 +7587,9 @@ def test_set_backup_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py b/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py index e2a37adb..ef406a41 100644 --- a/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py @@ -626,8 +626,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -727,8 +728,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetSslProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -860,8 +862,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -948,8 +951,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1029,8 +1033,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1162,8 +1167,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1239,8 +1245,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetSslProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1327,8 +1334,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetSslProxyRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetSslProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1462,8 +1470,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetSslProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,6 +1539,79 @@ def test_insert_rest(request_type): 
"ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetSslProxyRequest.meta.fields[ + "target_ssl_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_ssl_proxy_resource"][field]) + ): + del request_init["target_ssl_proxy_resource"][field][i][subfield] + else: + del request_init["target_ssl_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1563,8 +1645,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1661,8 +1744,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetSslProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1756,19 +1840,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_ssl_proxy_resource"] = { - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_header": "proxy_header_value", - "self_link": "self_link_value", - "service": "service_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1809,8 +1880,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1879,6 +1951,79 @@ def test_insert_unary_rest(request_type): "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetSslProxyRequest.meta.fields[ + "target_ssl_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_ssl_proxy_resource"][field]) + ): + del request_init["target_ssl_proxy_resource"][field][i][subfield] + else: + del request_init["target_ssl_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
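Alongside the field pruning, the recurring mechanical change in this diff drops the `pb_return_value` temporary and rebinds `return_value` to its protobuf form before JSON encoding. Below is a minimal sketch of that serialization step for the mocked REST response; the sample `Operation` value is hypothetical.

```python
# Minimal sketch of the serialization pattern this diff standardizes on:
# rebind the proto-plus return value to its protobuf type, then render it as
# JSON for the mocked HTTP response. The sample Operation is hypothetical.
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="operation-123")

# Convert return value to protobuf type
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
print(json_return_value)
```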
@@ -1912,8 +2057,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1990,8 +2136,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2085,19 +2232,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_ssl_proxy_resource"] = { - "certificate_map": "certificate_map_value", - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_header": "proxy_header_value", - "self_link": "self_link_value", - "service": "service_value", - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], - "ssl_policy": "ssl_policy_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2138,8 +2272,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2210,8 +2345,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetSslProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2297,8 +2433,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetSslProxiesRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetSslProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2434,8 +2571,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = compute.TargetSslProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2546,6 +2684,88 @@ def test_set_backend_service_rest(request_type): request_init["target_ssl_proxies_set_backend_service_request_resource"] = { "service": "service_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackendServiceTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_backend_service_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2579,8 +2799,9 @@ def test_set_backend_service_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2683,8 +2904,9 @@ def test_set_backend_service_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2779,9 +3001,6 @@ def test_set_backend_service_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_backend_service_request_resource"] = { - "service": "service_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2823,8 +3042,9 @@ def test_set_backend_service_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2884,6 +3104,88 @@ def test_set_backend_service_unary_rest(request_type): request_init["target_ssl_proxies_set_backend_service_request_resource"] = { "service": "service_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackendServiceTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_backend_service_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
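Stripped of the generated boilerplate, the pruning loop above deletes every (field, subfield) pair in the sample request body that the runtime schema does not list. A simplified, self-contained version of that logic follows; the sample dict and the `("backend", "service")` pair are hypothetical placeholders.

```python
# Simplified sketch of the pruning logic: drop nested keys from a sample
# request dict when the runtime schema does not know them. Unlike the
# generated code, this version inspects every entry of a repeated field.
def prune_unknown_subfields(resource: dict, runtime_nested_fields: set) -> None:
    """Remove (field, subfield) combinations absent from the runtime schema."""
    for field, value in list(resource.items()):
        # Repeated composite fields carry a list of dicts; singular ones a dict.
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            for subfield in list(entry.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del entry[subfield]


sample = {"backend": {"service": "service_value", "new_field": "ignored"}}
prune_unknown_subfields(sample, {("backend", "service")})
print(sample)  # {'backend': {'service': 'service_value'}}
```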
@@ -2917,8 +3219,9 @@ def test_set_backend_service_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2999,8 +3302,9 @@ def test_set_backend_service_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3095,9 +3399,6 @@ def test_set_backend_service_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_backend_service_request_resource"] = { - "service": "service_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3139,8 +3440,9 @@ def test_set_backend_service_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3200,6 +3502,88 @@ def test_set_certificate_map_rest(request_type): request_init["target_ssl_proxies_set_certificate_map_request_resource"] = { "certificate_map": "certificate_map_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCertificateMapTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_certificate_map_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3233,8 +3617,9 @@ def test_set_certificate_map_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3337,8 +3722,9 @@ def test_set_certificate_map_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3433,9 +3819,6 @@ def test_set_certificate_map_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_certificate_map_request_resource"] = { - "certificate_map": "certificate_map_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3477,8 +3860,9 @@ def test_set_certificate_map_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3538,6 +3922,88 @@ def test_set_certificate_map_unary_rest(request_type): request_init["target_ssl_proxies_set_certificate_map_request_resource"] = { "certificate_map": "certificate_map_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetCertificateMapTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_certificate_map_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_certificate_map_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3571,8 +4037,9 @@ def test_set_certificate_map_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3653,8 +4120,9 @@ def test_set_certificate_map_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3749,9 +4217,6 @@ def test_set_certificate_map_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_certificate_map_request_resource"] = { - "certificate_map": "certificate_map_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3793,8 +4258,9 @@ def test_set_certificate_map_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3854,6 +4320,88 @@ def test_set_proxy_header_rest(request_type): request_init["target_ssl_proxies_set_proxy_header_request_resource"] = { "proxy_header": "proxy_header_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetProxyHeaderTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_proxy_header_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3887,8 +4435,9 @@ def test_set_proxy_header_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3991,8 +4540,9 @@ def test_set_proxy_header_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4087,9 +4637,6 @@ def test_set_proxy_header_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_proxy_header_request_resource"] = { - "proxy_header": "proxy_header_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4131,8 +4678,9 @@ def test_set_proxy_header_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4192,6 +4740,88 @@ def test_set_proxy_header_unary_rest(request_type): request_init["target_ssl_proxies_set_proxy_header_request_resource"] = { "proxy_header": "proxy_header_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetProxyHeaderTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_proxy_header_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4225,8 +4855,9 @@ def test_set_proxy_header_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4307,8 +4938,9 @@ def test_set_proxy_header_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4403,9 +5035,6 @@ def test_set_proxy_header_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_proxy_header_request_resource"] = { - "proxy_header": "proxy_header_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4447,8 +5076,9 @@ def test_set_proxy_header_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4508,6 +5138,88 @@ def test_set_ssl_certificates_rest(request_type): request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = { "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
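The motivation for the pruning, per the comment the generator emits, is that `request_type(**request_init)` fails when the installed `google-cloud-compute` predates a newly added subfield, since proto-plus rejects unknown keys when a message is built from a mapping. A sketch of the construction step the pruning protects, using only the field names and sample values visible in this diff:

```python
# Sketch of the construction step that the pruning above protects. If the
# runtime dependency were older than the sample request, an unknown nested key
# would make this constructor raise, which is why such keys are deleted first.
from google.cloud.compute_v1.types import compute

request_init = {
    "project": "sample1",
    "target_ssl_proxy": "sample2",
    "target_ssl_proxies_set_ssl_certificates_request_resource": {
        "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"]
    },
}
request = compute.SetSslCertificatesTargetSslProxyRequest(**request_init)
print(request.target_ssl_proxies_set_ssl_certificates_request_resource.ssl_certificates)
```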
@@ -4541,8 +5253,9 @@ def test_set_ssl_certificates_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4645,8 +5358,9 @@ def test_set_ssl_certificates_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4742,9 +5456,6 @@ def test_set_ssl_certificates_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = { - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4786,8 +5497,9 @@ def test_set_ssl_certificates_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4847,6 +5559,88 @@ def test_set_ssl_certificates_unary_rest(request_type): request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = { "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslCertificatesTargetSslProxyRequest.meta.fields[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field] + ), + ): + del request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4880,8 +5674,9 @@ def test_set_ssl_certificates_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4962,8 +5757,9 @@ def test_set_ssl_certificates_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5059,9 +5855,6 @@ def test_set_ssl_certificates_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = { - "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5103,8 +5896,9 @@ def test_set_ssl_certificates_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5162,6 +5956,81 @@ def test_set_ssl_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslPolicyTargetSslProxyRequest.meta.fields[ + "ssl_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["ssl_policy_reference_resource"][field]) + ): + del request_init["ssl_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["ssl_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5195,8 +6064,9 @@ def test_set_ssl_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5299,8 +6169,9 @@ def test_set_ssl_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5395,7 +6266,6 @@ def test_set_ssl_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5437,8 +6307,9 @@ def test_set_ssl_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5496,6 +6367,81 @@ def test_set_ssl_policy_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSslPolicyTargetSslProxyRequest.meta.fields[ + "ssl_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "ssl_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["ssl_policy_reference_resource"][field]) + ): + del request_init["ssl_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["ssl_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5529,8 +6475,9 @@ def test_set_ssl_policy_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5611,8 +6558,9 @@ def test_set_ssl_policy_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5707,7 +6655,6 @@ def test_set_ssl_policy_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} - request_init["ssl_policy_reference_resource"] = {"ssl_policy": "ssl_policy_value"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5749,8 +6696,9 @@ def test_set_ssl_policy_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 0400e30c..7795c0ac 100644 --- a/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,8 +840,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -994,8 +997,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1095,8 +1099,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteTargetTcpProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1228,8 +1233,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1316,8 +1322,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1397,8 +1404,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1530,8 +1538,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -1606,8 +1615,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1693,8 +1703,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetTcpProxyRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1828,8 +1839,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1895,6 +1907,79 @@ def test_insert_rest(request_type): "self_link": "self_link_value", "service": "service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_tcp_proxy_resource"][field]) + ): + del request_init["target_tcp_proxy_resource"][field][i][subfield] + else: + del request_init["target_tcp_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
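Editor's note: the most frequent change in these hunks is the rename of pb_return_value to return_value. The proto-plus return value is converted to its underlying protobuf message with the generated .pb() classmethod, because json_format.MessageToJson only accepts protobuf messages, and the JSON is then stuffed into a mocked requests.Response. A self-contained sketch of that pattern, using compute.Operation as an illustrative payload:

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Operation(name="operation-123")

# Wrap the value into a proper Response obj, as the tests do.
response_value = Response()
response_value.status_code = 200

# Convert return value to protobuf type before JSON serialization.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")

# Round-trip check: the body parses back into a proto-plus message.
parsed = compute.Operation.from_json(response_value._content.decode("UTF-8"))
assert parsed.name == "operation-123"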
@@ -1928,8 +2013,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2026,8 +2112,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertTargetTcpProxyRe response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2121,18 +2208,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_tcp_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "proxy_header": "proxy_header_value", - "region": "region_value", - "self_link": "self_link_value", - "service": "service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2173,8 +2248,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2242,6 +2318,79 @@ def test_insert_unary_rest(request_type): "self_link": "self_link_value", "service": "service_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_tcp_proxy_resource"][field]) + ): + del request_init["target_tcp_proxy_resource"][field][i][subfield] + else: + del request_init["target_tcp_proxy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2275,8 +2424,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2353,8 +2503,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2448,18 +2599,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["target_tcp_proxy_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "proxy_bind": True, - "proxy_header": "proxy_header_value", - "region": "region_value", - "self_link": "self_link_value", - "service": "service_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2500,8 +2639,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2572,8 +2712,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2659,8 +2800,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetTcpProxiesRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2796,8 +2938,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetTcpProxyList.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2908,6 +3051,88 @@ def test_set_backend_service_rest(request_type): request_init["target_tcp_proxies_set_backend_service_request_resource"] = { "service": "service_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackendServiceTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxies_set_backend_service_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field] + ), + ): + del request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
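Editor's note: the deletion loop added in these preambles distinguishes repeated message fields (prune the offending subfield from every element) from singular ones (prune the dict directly). The same logic, condensed into a plain-dict helper; the field names below are made up, and the (field, subfield) pairs would normally come from the runtime_nested_fields comprehension.

def prune_unknown_subfields(resource, runtime_nested_fields):
    # Drop nested keys that the runtime version of the dependency does not define.
    for field, value in list(resource.items()):
        if isinstance(value, list) and value and isinstance(value[0], dict):
            # Repeated message field: prune each element.
            for element in value:
                for subfield in list(element):
                    if (field, subfield) not in runtime_nested_fields:
                        del element[subfield]
        elif isinstance(value, dict):
            # Singular message field: prune the dict in place.
            for subfield in list(value):
                if (field, subfield) not in runtime_nested_fields:
                    del value[subfield]


resource = {
    "name": "name_value",                               # scalar, left alone
    "nested": {"known": 1, "added_in_newer_proto": 2},  # singular message field
    "items": [{"known": 1, "added_in_newer_proto": 2}], # repeated message field
}
prune_unknown_subfields(resource, {("nested", "known"), ("items", "known")})
assert resource == {"name": "name_value", "nested": {"known": 1}, "items": [{"known": 1}]}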
@@ -2941,8 +3166,9 @@ def test_set_backend_service_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3045,8 +3271,9 @@ def test_set_backend_service_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3141,9 +3368,6 @@ def test_set_backend_service_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} - request_init["target_tcp_proxies_set_backend_service_request_resource"] = { - "service": "service_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3185,8 +3409,9 @@ def test_set_backend_service_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3246,6 +3471,88 @@ def test_set_backend_service_unary_rest(request_type): request_init["target_tcp_proxies_set_backend_service_request_resource"] = { "service": "service_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetBackendServiceTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxies_set_backend_service_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field] + ), + ): + del request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3279,8 +3586,9 @@ def test_set_backend_service_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3361,8 +3669,9 @@ def test_set_backend_service_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3457,9 +3766,6 @@ def test_set_backend_service_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} - request_init["target_tcp_proxies_set_backend_service_request_resource"] = { - "service": "service_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3501,8 +3807,9 @@ def test_set_backend_service_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3562,6 +3869,88 @@ def test_set_proxy_header_rest(request_type): request_init["target_tcp_proxies_set_proxy_header_request_resource"] = { "proxy_header": "proxy_header_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetProxyHeaderTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxies_set_proxy_header_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field] + ), + ): + del request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
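Editor's note: for flat resources like the proxy-header body above (its sample only carries proxy_header), the comprehension over message-typed fields is expected to come out empty, so runtime_nested_fields is empty and the preamble prunes nothing. A quick way to confirm that, assuming the same proto-plus import these tests use:

from google.cloud.compute_v1.types import compute

resource_field = compute.SetProxyHeaderTargetTcpProxyRequest.meta.fields[
    "target_tcp_proxies_set_proxy_header_request_resource"
]
message_typed = [
    f.name
    for f in resource_field.message.meta.fields.values()
    if getattr(f, "message", None)
]
print(message_typed)  # expected: [] (proxy_header is not a message-typed field)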
@@ -3595,8 +3984,9 @@ def test_set_proxy_header_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3699,8 +4089,9 @@ def test_set_proxy_header_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3795,9 +4186,6 @@ def test_set_proxy_header_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} - request_init["target_tcp_proxies_set_proxy_header_request_resource"] = { - "proxy_header": "proxy_header_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3839,8 +4227,9 @@ def test_set_proxy_header_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3900,6 +4289,88 @@ def test_set_proxy_header_unary_rest(request_type): request_init["target_tcp_proxies_set_proxy_header_request_resource"] = { "proxy_header": "proxy_header_value" } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetProxyHeaderTargetTcpProxyRequest.meta.fields[ + "target_tcp_proxies_set_proxy_header_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field] + ), + ): + del request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field][i][subfield] + else: + del request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3933,8 +4404,9 @@ def test_set_proxy_header_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4015,8 +4487,9 @@ def test_set_proxy_header_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4111,9 +4584,6 @@ def test_set_proxy_header_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} - request_init["target_tcp_proxies_set_proxy_header_request_resource"] = { - "proxy_header": "proxy_header_value" - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4155,8 +4625,9 @@ def test_set_proxy_header_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index cf275bf7..79a0a7e4 100644 --- a/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -609,8 +609,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -700,8 +701,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -838,8 +840,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = 
Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -998,8 +1001,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1105,8 +1109,9 @@ def test_delete_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1248,8 +1253,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1341,8 +1347,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1426,8 +1433,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1569,8 +1577,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -1652,8 +1661,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1745,8 +1755,9 @@ def test_get_rest_required_fields(request_type=compute.GetTargetVpnGatewayReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1890,8 +1901,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1961,6 +1973,79 @@ def test_insert_rest(request_type): "status": "status_value", "tunnels": ["tunnels_value1", "tunnels_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetVpnGatewayRequest.meta.fields[ + "target_vpn_gateway_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_vpn_gateway_resource"][field]) + ): + del request_init["target_vpn_gateway_resource"][field][i][subfield] + else: + del request_init["target_vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
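Editor's note: the reason the pruning matters is that request_type(**request_init) hands the nested dict to proto-plus, which rejects keys the installed message type does not define. If the runtime google-cloud-compute predates the generator, any newer subfield left in the sample body would break request construction before the test ever reaches the transport. A small sketch; hypothetical_new_field is, as the name says, made up.

from google.cloud.compute_v1.types import compute

# Unknown nested keys make construction fail (ValueError in the proto-plus
# versions these tests run against), which is what the pruning preamble guards against.
try:
    compute.InsertTargetVpnGatewayRequest(
        project="sample1",
        region="sample2",
        target_vpn_gateway_resource={
            "name": "name_value",
            "hypothetical_new_field": "only exists in a newer proto",
        },
    )
except ValueError as exc:
    print(f"rejected as expected: {exc}")

# With only runtime-known fields the same construction succeeds.
request = compute.InsertTargetVpnGatewayRequest(
    project="sample1",
    region="sample2",
    target_vpn_gateway_resource={"name": "name_value"},
)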
@@ -1994,8 +2079,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2098,8 +2184,9 @@ def test_insert_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2194,21 +2281,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "forwarding_rules": ["forwarding_rules_value1", "forwarding_rules_value2"], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "tunnels": ["tunnels_value1", "tunnels_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2250,8 +2322,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2323,6 +2396,79 @@ def test_insert_unary_rest(request_type): "status": "status_value", "tunnels": ["tunnels_value1", "tunnels_value2"], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertTargetVpnGatewayRequest.meta.fields[ + "target_vpn_gateway_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "target_vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["target_vpn_gateway_resource"][field]) + ): + del request_init["target_vpn_gateway_resource"][field][i][subfield] + else: + del request_init["target_vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
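The recurring `pb_return_value` → `return_value` change in these hunks is a rename plus a comment; the conversion itself is unchanged. A minimal sketch of that conversion, assuming the compute types are importable as `google.cloud.compute_v1.types.compute` (the usual import in these tests, not shown in this diff):

from google.cloud.compute_v1.types import compute
from google.protobuf import json_format

# Proto-plus wrapper, as designated for the mocked transport in the tests above.
return_value = compute.Operation(name="operation-1")

# .pb() unwraps the proto-plus object into its underlying protobuf message,
# which json_format can then serialize.
return_value = compute.Operation.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)

print(json_return_value)  # e.g. '{\n  "name": "operation-1"\n}' (formatting may vary)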
@@ -2356,8 +2502,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2438,8 +2585,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2534,21 +2682,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["target_vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "forwarding_rules": ["forwarding_rules_value1", "forwarding_rules_value2"], - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - "tunnels": ["tunnels_value1", "tunnels_value2"], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
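The mocked-session pattern wrapped around every call above can be reproduced in isolation. A small sketch, assuming only the requests library and the standard-library mock; the JSON body is an arbitrary placeholder:

from unittest import mock

from requests import Response
from requests.sessions import Session

# Build the fake response by hand, as the generated tests do: a status code
# plus a pre-encoded JSON body assigned to the private _content attribute.
response_value = Response()
response_value.status_code = 200
response_value._content = b'{"kind": "compute#operation"}'

with mock.patch.object(Session, "request") as req:
    req.return_value = response_value
    resp = Session().request("GET", "https://example.invalid/")
    assert resp.status_code == 200
    assert resp.json() == {"kind": "compute#operation"}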
@@ -2590,8 +2723,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2663,8 +2797,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2754,8 +2889,9 @@ def test_list_rest_required_fields(request_type=compute.ListTargetVpnGatewaysReq response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2897,8 +3033,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TargetVpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TargetVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3011,6 +3148,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsTargetVpnGatewayRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
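The `hasattr(field.message, "DESCRIPTOR")` check above is what separates the two introspection paths. A small probe of both, assuming google-cloud-compute is installed and reusing the same SetLabels request type as the hunk above; the expected outputs are inferences from the generated logic, not verified claims:

from google.cloud.compute_v1.types import compute

field = compute.SetLabelsTargetVpnGatewayRequest.meta.fields[
    "region_set_labels_request_resource"
]

# Proto-plus branch: the field's message type exposes its schema via .meta.fields
# and, per the check above, has no DESCRIPTOR attribute.
print(hasattr(field.message, "DESCRIPTOR"))  # expected False for a proto-plus type
print(sorted(field.message.meta.fields))     # expected ['label_fingerprint', 'labels']

# Protobuf branch: unwrapping an instance with .pb() yields a message whose
# DESCRIPTOR.fields lists the same names from the protobuf side.
pb_msg = field.message.pb(field.message())
print([f.name for f in pb_msg.DESCRIPTOR.fields])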
@@ -3044,8 +3256,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3152,8 +3365,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3249,10 +3463,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3299,8 +3509,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3362,6 +3573,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsTargetVpnGatewayRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3395,8 +3681,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3481,8 +3768,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3578,10 +3866,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3628,8 +3912,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_url_maps.py b/tests/unit/gapic/compute_v1/test_url_maps.py index 39c8f48f..07aea15f 100644 --- a/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/tests/unit/gapic/compute_v1/test_url_maps.py @@ -571,8 +571,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -662,8 +663,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -798,8 +800,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
compute.UrlMapsAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -949,8 +952,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1050,8 +1054,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1179,8 +1184,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1267,8 +1273,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1346,8 +1353,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1475,8 +1483,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -1550,8 +1559,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1636,8 +1646,9 @@ def test_get_rest_required_fields(request_type=compute.GetUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1765,8 +1776,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMap.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1991,6 +2003,73 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
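The `request_type(**request_init)` line that closes each of these blocks builds the proto-plus request directly from nested plain dicts, which is why unknown subfields have to be pruned first. A hedged sketch with a trimmed-down url_map_resource (the two subfields are taken from the sample request above):

from google.cloud.compute_v1.types import compute

request_init = {
    "project": "sample1",
    "url_map_resource": {
        "name": "name_value",
        "default_service": "default_service_value",
    },
}
# Nested dicts are coerced into the corresponding message fields; a key that
# does not exist in the runtime schema would be rejected here.
request = compute.InsertUrlMapRequest(**request_init)
print(request.url_map_resource.name)  # 'name_value'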
@@ -2024,8 +2103,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2122,8 +2202,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2213,177 +2294,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": 
"path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2424,8 +2334,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2651,6 +2562,73 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2684,8 +2662,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2760,8 +2739,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2851,177 +2831,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - 
"path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3062,8 +2871,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3122,6 +2932,81 @@ def test_invalidate_cache_rest(request_type): "host": "host_value", "path": "path_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InvalidateCacheUrlMapRequest.meta.fields[ + "cache_invalidation_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "cache_invalidation_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["cache_invalidation_rule_resource"][field]) + ): + del request_init["cache_invalidation_rule_resource"][field][i][ + subfield + ] + else: + del request_init["cache_invalidation_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
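After the trimming in the *_rest_bad_request hunks above, the surviving test body follows one pattern. A hedged sketch of it; the UrlMapsClient import path and the insert_unary method name are assumptions inferred from these tests, not shown verbatim in this diff:

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from requests import Request, Response
from requests.sessions import Session

from google.cloud.compute_v1.services.url_maps import UrlMapsClient  # assumed path
from google.cloud.compute_v1.types import compute


def test_insert_unary_rest_bad_request_sketch():
    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # Only the transcoding fields are needed; the resource body was dropped above.
    request = compute.InsertUrlMapRequest(project="sample1")

    # A patched Session.request returning HTTP 400 surfaces as BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)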
@@ -3155,8 +3040,9 @@ def test_invalidate_cache_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3259,8 +3145,9 @@ def test_invalidate_cache_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3353,10 +3240,6 @@ def test_invalidate_cache_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "url_map": "sample2"} - request_init["cache_invalidation_rule_resource"] = { - "host": "host_value", - "path": "path_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3398,8 +3281,9 @@ def test_invalidate_cache_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3460,6 +3344,81 @@ def test_invalidate_cache_unary_rest(request_type): "host": "host_value", "path": "path_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InvalidateCacheUrlMapRequest.meta.fields[ + "cache_invalidation_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "cache_invalidation_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["cache_invalidation_rule_resource"][field]) + ): + del request_init["cache_invalidation_rule_resource"][field][i][ + subfield + ] + else: + del request_init["cache_invalidation_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3493,8 +3452,9 @@ def test_invalidate_cache_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3575,8 +3535,9 @@ def test_invalidate_cache_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3669,10 +3630,6 @@ def test_invalidate_cache_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "url_map": "sample2"} - request_init["cache_invalidation_rule_resource"] = { - "host": "host_value", - "path": "path_value", - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3714,8 +3671,9 @@ def test_invalidate_cache_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3787,8 +3745,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3874,8 +3833,9 @@ def test_list_rest_required_fields(request_type=compute.ListUrlMapsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4005,8 +3965,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -4284,6 +4245,73 @@ def test_patch_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
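The flattened-call test added in the next hunk asserts the transcoded URL with path_template.validate. A minimal sketch of that helper on its own, assuming google-api-core is installed; the sample paths are illustrative:

from google.api_core import path_template

template = "/compute/v1/projects/{project}/global/urlMaps/{url_map}"

# A concrete path that binds every template variable validates...
assert path_template.validate(
    template, "/compute/v1/projects/sample1/global/urlMaps/sample2"
)
# ...while a path for a different resource does not.
assert not path_template.validate(
    template, "/compute/v1/projects/sample1/regions/us-central1"
)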
@@ -4317,8 +4345,9 @@ def test_patch_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4419,8 +4448,9 @@ def test_patch_rest_required_fields(request_type=compute.PatchUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4509,6 +4539,106 @@ def test_patch_rest_bad_request( transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + url_map="url_map_value", + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchUrlMapRequest(), + project="project_value", + url_map="url_map_value", + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_patch_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchUrlMapRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding request_init = {"project": "sample1", "url_map": "sample2"} request_init["url_map_resource"] = { @@ -4682,277 +4812,74 @@ def test_patch_rest_bad_request( } ], } - request = request_type(**request_init) + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.patch(request) + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - -def test_patch_rest_flattened(): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.Operation() - - # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "url_map": "sample2"} - - # get truthy value for each flattened field - mock_args = dict( - project="project_value", - url_map="url_map_value", - url_map_resource=compute.UrlMap( - creation_timestamp="creation_timestamp_value" - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.patch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" - % client.transport._host, - args[1], - ) - - -def test_patch_rest_flattened_error(transport: str = "rest"): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.patch( - compute.PatchUrlMapRequest(), - project="project_value", - url_map="url_map_value", - url_map_resource=compute.UrlMap( - creation_timestamp="creation_timestamp_value" - ), - ) - - -def test_patch_rest_error(): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - compute.PatchUrlMapRequest, - dict, - ], -) -def test_patch_unary_rest(request_type): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "url_map": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - 
"header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } - request = request_type(**request_init) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del 
request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] + request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -4985,8 +4912,9 @@ def test_patch_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5065,8 +4993,9 @@ def test_patch_unary_rest_required_fields(request_type=compute.PatchUrlMapReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5092,242 +5021,71 @@ def test_patch_unary_rest_unset_required_fields(): "urlMap", "urlMapResource", ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): - transport = transports.UrlMapsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), - ) - client = UrlMapsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.UrlMapsRestInterceptor, "post_patch" - ) as post, mock.patch.object( - transports.UrlMapsRestInterceptor, "pre_patch" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.PatchUrlMapRequest.pb(compute.PatchUrlMapRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.PatchUrlMapRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.patch_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchUrlMapRequest -): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "url_map": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": 
["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - 
"priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchUrlMapRequest.pb(compute.PatchUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchUrlMapRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5369,8 +5127,9 @@ def test_patch_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5598,6 +5357,73 @@ def test_update_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
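
The recurring `pb_return_value` → `return_value` rename in these hunks reflects how the fake response body is built: `json_format.MessageToJson` only accepts raw protobuf messages, so the proto-plus instance is first unwrapped with the generated `.pb()` classmethod. A minimal sketch of that round trip, assuming the same `compute` types module the test file imports:

from google.cloud.compute_v1.types import compute  # assumed import path
from google.protobuf import json_format

return_value = compute.Operation(name="operation-1")    # proto-plus wrapper
pb_return_value = compute.Operation.pb(return_value)    # underlying protobuf message
json_return_value = json_format.MessageToJson(pb_return_value)
content = json_return_value.encode("UTF-8")             # body for the mocked Response
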
@@ -5631,8 +5457,9 @@ def test_update_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5733,8 +5560,9 @@ def test_update_rest_required_fields(request_type=compute.UpdateUrlMapRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5812,190 +5640,19 @@ def test_update_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - - -def test_update_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateUrlMapRequest -): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "url_map": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", "expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", 
- "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + post.assert_called_once() + + +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6037,8 +5694,9 @@ def test_update_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6266,6 +5924,73 @@ def test_update_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateUrlMapRequest.meta.fields["url_map_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_map_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_map_resource"][field])): + del request_init["url_map_resource"][field][i][subfield] + else: + del request_init["url_map_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
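
The relocated interceptor tests above patch `pre_update`/`post_update` on `transports.UrlMapsRestInterceptor`. Outside of tests, the same hooks are reached by subclassing the interceptor and handing it to the REST transport. The sketch below assumes the hook signatures implied by the tests (pre hooks return `(request, metadata)`, post hooks return the response); treat it as an illustration, not a documented contract:

# Sketch under the assumptions stated above; imports mirror the test module.
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.url_maps import UrlMapsClient, transports

class LoggingUrlMapsInterceptor(transports.UrlMapsRestInterceptor):
    def pre_update(self, request, metadata):
        print("updating url map:", request.url_map)
        return request, metadata

    def post_update(self, response):
        print("update returned operation:", response.name)
        return response

transport = transports.UrlMapsRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingUrlMapsInterceptor(),
)
client = UrlMapsClient(transport=transport)
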
@@ -6299,8 +6024,9 @@ def test_update_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6379,8 +6105,9 @@ def test_update_unary_rest_required_fields(request_type=compute.UpdateUrlMapRequ response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6406,242 +6133,71 @@ def test_update_unary_rest_unset_required_fields(): "urlMap", "urlMapResource", ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_unary_rest_interceptors(null_interceptor): - transport = transports.UrlMapsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), - ) - client = UrlMapsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.UrlMapsRestInterceptor, "post_update" - ) as post, mock.patch.object( - transports.UrlMapsRestInterceptor, "pre_update" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = compute.UpdateUrlMapRequest.pb(compute.UpdateUrlMapRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) - - request = compute.UpdateUrlMapRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = compute.Operation() - - client.update_unary( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_unary_rest_bad_request( - transport: str = "rest", request_type=compute.UpdateUrlMapRequest -): - client = UrlMapsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "url_map": "sample2"} - request_init["url_map_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": ["expose_headers_value1", 
"expose_headers_value2"], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - "description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - 
"expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - } + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_update" + ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "pre_update" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateUrlMapRequest.pb(compute.UpdateUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateUrlMapRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6683,8 +6239,9 @@ def test_update_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6921,6 +6478,81 @@ def test_validate_rest(request_type): ], }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.ValidateUrlMapRequest.meta.fields[ + "url_maps_validate_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "url_maps_validate_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["url_maps_validate_request_resource"][field]) + ): + del request_init["url_maps_validate_request_resource"][field][i][ + subfield + ] + else: + del request_init["url_maps_validate_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
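
The `*_rest_flattened` tests in this file assert the transcoded URI with `google.api_core.path_template.validate`, which matches a concrete path against a URI template (the tests prefix the template with `client.transport._host`). A small isolated sketch with illustrative values:

from google.api_core import path_template

template = "/compute/v1/projects/{project}/global/urlMaps/{url_map}"
assert path_template.validate(
    template, "/compute/v1/projects/sample1/global/urlMaps/sample2"
)
assert not path_template.validate(
    template, "/compute/v1/projects/sample1/global/urlMaps"
)
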
@@ -6931,8 +6563,9 @@ def test_validate_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7009,8 +6642,9 @@ def test_validate_rest_required_fields(request_type=compute.ValidateUrlMapReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7103,186 +6737,6 @@ def test_validate_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "url_map": "sample2"} - request_init["url_maps_validate_request_resource"] = { - "load_balancing_schemes": [ - "load_balancing_schemes_value1", - "load_balancing_schemes_value2", - ], - "resource": { - "creation_timestamp": "creation_timestamp_value", - "default_route_action": { - "cors_policy": { - "allow_credentials": True, - "allow_headers": ["allow_headers_value1", "allow_headers_value2"], - "allow_methods": ["allow_methods_value1", "allow_methods_value2"], - "allow_origin_regexes": [ - "allow_origin_regexes_value1", - "allow_origin_regexes_value2", - ], - "allow_origins": ["allow_origins_value1", "allow_origins_value2"], - "disabled": True, - "expose_headers": [ - "expose_headers_value1", - "expose_headers_value2", - ], - "max_age": 722, - }, - "fault_injection_policy": { - "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, - }, - "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, - "retry_policy": { - "num_retries": 1197, - "per_try_timeout": {}, - "retry_conditions": [ - "retry_conditions_value1", - "retry_conditions_value2", - ], - }, - "timeout": {}, - "url_rewrite": { - "host_rewrite": "host_rewrite_value", - "path_prefix_rewrite": "path_prefix_rewrite_value", - "path_template_rewrite": "path_template_rewrite_value", - }, - "weighted_backend_services": [ - { - "backend_service": "backend_service_value", - "header_action": { - "request_headers_to_add": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - "replace": True, - } - ], - "request_headers_to_remove": [ - "request_headers_to_remove_value1", - "request_headers_to_remove_value2", - ], - "response_headers_to_add": {}, - "response_headers_to_remove": [ - "response_headers_to_remove_value1", - "response_headers_to_remove_value2", - ], - }, - "weight": 648, - } - ], - }, - "default_service": "default_service_value", - "default_url_redirect": { - "host_redirect": "host_redirect_value", - "https_redirect": True, - "path_redirect": "path_redirect_value", - "prefix_redirect": "prefix_redirect_value", - "redirect_response_code": "redirect_response_code_value", - "strip_query": True, - }, - 
"description": "description_value", - "fingerprint": "fingerprint_value", - "header_action": {}, - "host_rules": [ - { - "description": "description_value", - "hosts": ["hosts_value1", "hosts_value2"], - "path_matcher": "path_matcher_value", - } - ], - "id": 205, - "kind": "kind_value", - "name": "name_value", - "path_matchers": [ - { - "default_route_action": {}, - "default_service": "default_service_value", - "default_url_redirect": {}, - "description": "description_value", - "header_action": {}, - "name": "name_value", - "path_rules": [ - { - "paths": ["paths_value1", "paths_value2"], - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - "route_rules": [ - { - "description": "description_value", - "header_action": {}, - "match_rules": [ - { - "full_path_match": "full_path_match_value", - "header_matches": [ - { - "exact_match": "exact_match_value", - "header_name": "header_name_value", - "invert_match": True, - "prefix_match": "prefix_match_value", - "present_match": True, - "range_match": { - "range_end": 931, - "range_start": 1178, - }, - "regex_match": "regex_match_value", - "suffix_match": "suffix_match_value", - } - ], - "ignore_case": True, - "metadata_filters": [ - { - "filter_labels": [ - { - "name": "name_value", - "value": "value_value", - } - ], - "filter_match_criteria": "filter_match_criteria_value", - } - ], - "path_template_match": "path_template_match_value", - "prefix_match": "prefix_match_value", - "query_parameter_matches": [ - { - "exact_match": "exact_match_value", - "name": "name_value", - "present_match": True, - "regex_match": "regex_match_value", - } - ], - "regex_match": "regex_match_value", - } - ], - "priority": 898, - "route_action": {}, - "service": "service_value", - "url_redirect": {}, - } - ], - } - ], - "region": "region_value", - "self_link": "self_link_value", - "tests": [ - { - "description": "description_value", - "expected_output_url": "expected_output_url_value", - "expected_redirect_response_code": 3275, - "headers": [{"name": "name_value", "value": "value_value"}], - "host": "host_value", - "path": "path_value", - "service": "service_value", - } - ], - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7324,8 +6778,9 @@ def test_validate_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_vpn_gateways.py index e2b70ce0..3f11d123 100644 --- a/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -963,8 +966,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1068,8 +1072,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteVpnGatewayReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ 
-1207,8 +1212,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1296,8 +1302,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1381,8 +1388,9 @@ def test_delete_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1520,8 +1528,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1597,8 +1606,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1688,8 +1698,9 @@ def test_get_rest_required_fields(request_type=compute.GetVpnGatewayRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1825,8 +1836,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGateway.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGateway.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1891,8 +1903,9 @@ def test_get_status_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1974,8 +1987,9 @@ def test_get_status_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2115,8 +2129,9 @@ def test_get_status_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2191,6 +2206,75 @@ def test_insert_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertVpnGatewayRequest.meta.fields["vpn_gateway_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpn_gateway_resource"][field])): + del request_init["vpn_gateway_resource"][field][i][subfield] + else: + del request_init["vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
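The helper just added probes for a DESCRIPTOR attribute to tell raw protobuf classes apart from proto-plus wrappers, which expose their fields through meta.fields instead. A minimal standalone sketch of the same idea follows; the function name list_nested_field_names and the VpnGateway example are illustrative, not part of the generated tests.

    from google.cloud.compute_v1.types import compute

    def list_nested_field_names(field):
        # Scalar fields carry no nested message, so there is nothing to enumerate.
        if not (hasattr(field, "message") and field.message):
            return []
        # Raw protobuf classes have a DESCRIPTOR; proto-plus wrappers do not
        # and publish their fields via `meta.fields`.
        if hasattr(field.message, "DESCRIPTOR"):
            return [f.name for f in field.message.DESCRIPTOR.fields]
        return [f.name for f in field.message.meta.fields.values()]

    # Example: nested fields of VpnGateway.vpn_interfaces as the installed
    # google-cloud-compute release defines them.
    iface_field = compute.VpnGateway.meta.fields["vpn_interfaces"]
    print(list_nested_field_names(iface_field))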
@@ -2224,8 +2308,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2326,8 +2411,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertVpnGatewayReques response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2422,26 +2508,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "vpn_interfaces": [ - { - "id": 205, - "interconnect_attachment": "interconnect_attachment_value", - "ip_address": "ip_address_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2483,8 +2549,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2561,6 +2628,75 @@ def test_insert_unary_rest(request_type): } ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertVpnGatewayRequest.meta.fields["vpn_gateway_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vpn_gateway_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpn_gateway_resource"][field])): + del request_init["vpn_gateway_resource"][field][i][subfield] + else: + del request_init["vpn_gateway_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
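Because the pruning loop above only manipulates plain dictionaries, its effect is easy to reproduce on a toy sample. The sketch below assumes a hypothetical runtime in which the nested ip_address subfield no longer exists; all data is invented for illustration.

    request_init = {
        "vpn_gateway_resource": {
            "name": "name_value",
            "vpn_interfaces": [  # repeated nested message
                {"id": 205, "ip_address": "ip_address_value"},
                {"id": 206, "ip_address": "ip_address_value2"},
            ],
        }
    }
    # Pretend the field enumeration decided `ip_address` is absent at runtime.
    subfields_not_in_runtime = [
        {"field": "vpn_interfaces", "subfield": "ip_address", "is_repeated": True}
    ]
    for entry in subfields_not_in_runtime:
        field, subfield = entry["field"], entry["subfield"]
        if entry["is_repeated"]:
            # Repeated fields are lists of dicts, so the stale key is removed
            # from every element, matching the index-based loop above.
            for item in request_init["vpn_gateway_resource"][field]:
                item.pop(subfield, None)
        else:
            request_init["vpn_gateway_resource"][field].pop(subfield, None)
    # request_init now only carries keys the runtime message can accept.

Iterating over the list elements directly is equivalent to the index-based deletion in the generated code, since only keys inside each element are removed, never the elements themselves.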
@@ -2594,8 +2730,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2676,8 +2813,9 @@ def test_insert_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2772,26 +2910,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["vpn_gateway_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "network": "network_value", - "region": "region_value", - "self_link": "self_link_value", - "stack_type": "stack_type_value", - "vpn_interfaces": [ - { - "id": 205, - "interconnect_attachment": "interconnect_attachment_value", - "ip_address": "ip_address_value", - } - ], - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
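The other change repeated throughout these hunks drops the intermediate pb_return_value binding: the proto-plus return value is converted to its underlying protobuf message in place and then serialized, since json_format.MessageToJson only accepts protobuf messages. A minimal sketch of the pattern, assuming google-cloud-compute and protobuf are installed; the operation name is made up.

    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute

    # proto-plus wrapper -> underlying protobuf message -> JSON payload
    return_value = compute.Operation(name="operation-1234")   # illustrative value
    return_value = compute.Operation.pb(return_value)         # raw protobuf form
    json_return_value = json_format.MessageToJson(return_value)
    body = json_return_value.encode("UTF-8")                   # bytes for the fake Response

Rebinding return_value rather than introducing pb_return_value changes nothing functionally; it simply keeps one name for the value that ends up in the mocked response.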
@@ -2833,8 +2951,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2906,8 +3025,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2997,8 +3117,9 @@ def test_list_rest_required_fields(request_type=compute.ListVpnGatewaysRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3138,8 +3259,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnGatewayList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3252,6 +3374,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsVpnGatewayRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
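Once the payload exists, the surrounding tests only need a canned requests.Response and a patched session to feed it back to the client. The sketch below is an outline, not a verbatim excerpt: it assumes the REST transport routes HTTP calls through a session held at client.transport._session and that VpnGatewaysClient.get accepts the flattened project/region/vpn_gateway arguments.

    from unittest import mock
    from requests import Response
    from google.auth import credentials as ga_credentials
    from google.protobuf import json_format
    from google.cloud import compute_v1
    from google.cloud.compute_v1.types import compute

    client = compute_v1.VpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials()
    )

    return_value = compute.VpnGateway(name="gateway-1")        # illustrative resource
    payload = json_format.MessageToJson(compute.VpnGateway.pb(return_value))

    response_value = Response()
    response_value.status_code = 200
    response_value._content = payload.encode("UTF-8")          # read back via .content

    # Assumption: patching request() on the transport's session makes the client
    # receive the canned body instead of performing a real HTTP call.
    with mock.patch.object(type(client.transport._session), "request") as req:
        req.return_value = response_value
        gateway = client.get(project="p", region="r", vpn_gateway="gateway-1")
        assert gateway.name == "gateway-1"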
@@ -3285,8 +3482,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3393,8 +3591,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3490,10 +3689,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3540,8 +3735,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3603,6 +3799,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsVpnGatewayRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
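The reason for all of this pruning is that the sample dictionaries were frozen when the tests were generated, while the installed google-cloud-compute release defines whatever fields it currently has. The runtime field set can be inspected directly; a small sketch, with the printed names depending on the installed version:

    from google.cloud.compute_v1.types import compute

    # Top-level fields of RegionSetLabelsRequest as defined by the installed release
    runtime_fields = set(compute.RegionSetLabelsRequest.meta.fields)
    print(sorted(runtime_fields))   # typically includes 'label_fingerprint' and 'labels'

    # A sample key is only safe to send if the runtime message still defines it
    sample = {"label_fingerprint": "label_fingerprint_value", "labels": {}}
    assert set(sample) <= runtime_fields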
@@ -3636,8 +3907,9 @@ def test_set_labels_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3722,8 +3994,9 @@ def test_set_labels_unary_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3819,10 +4092,6 @@ def test_set_labels_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3869,8 +4138,9 @@ def test_set_labels_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3931,6 +4201,81 @@ def test_test_iam_permissions_rest(request_type): request_init["test_permissions_request_resource"] = { "permissions": ["permissions_value1", "permissions_value2"] } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsVpnGatewayRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
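It is worth noting why MessageToJson output is the right thing to place in the mocked response: the client parses that JSON body back into the response type on the way out. A round-trip sketch using TestPermissionsResponse; the permission string is illustrative.

    from google.protobuf import json_format
    from google.cloud.compute_v1.types import compute

    original = compute.TestPermissionsResponse(
        permissions=["compute.vpnGateways.get"]      # illustrative permission string
    )
    payload = json_format.MessageToJson(compute.TestPermissionsResponse.pb(original))

    # Parse the payload back into a fresh protobuf instance, mimicking what the
    # REST client does with the mocked response body.
    empty = compute.TestPermissionsResponse()
    parsed_pb = json_format.Parse(payload, compute.TestPermissionsResponse.pb(empty))
    assert list(parsed_pb.permissions) == ["compute.vpnGateways.get"]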
@@ -3943,8 +4288,9 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4028,8 +4374,9 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4127,9 +4474,6 @@ def test_test_iam_permissions_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["test_permissions_request_resource"] = { - "permissions": ["permissions_value1", "permissions_value2"] - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4176,8 +4520,9 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.TestPermissionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index 499020f3..7f5f175e 100644 --- a/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -580,8 +580,9 @@ def test_aggregated_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -671,8 +672,9 @@ def test_aggregated_list_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -809,8 +811,9 @@ def test_aggregated_list_rest_flattened(): # Wrap the value into a 
proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -960,8 +963,9 @@ def test_delete_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1065,8 +1069,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteVpnTunnelRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1202,8 +1207,9 @@ def test_delete_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1291,8 +1297,9 @@ def test_delete_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1374,8 +1381,9 @@ def test_delete_unary_rest_required_fields(request_type=compute.DeleteVpnTunnelR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1511,8 +1519,9 @@ def test_delete_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1601,8 +1610,9 @@ def test_get_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1705,8 +1715,9 @@ def test_get_rest_required_fields(request_type=compute.GetVpnTunnelRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1842,8 +1853,9 @@ def test_get_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1930,6 +1942,73 @@ def test_insert_rest(request_type): "vpn_gateway": "vpn_gateway_value", "vpn_gateway_interface": 2229, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertVpnTunnelRequest.meta.fields["vpn_tunnel_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["vpn_tunnel_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpn_tunnel_resource"][field])): + del request_init["vpn_tunnel_resource"][field][i][subfield] + else: + del request_init["vpn_tunnel_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1963,8 +2042,9 @@ def test_insert_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2065,8 +2145,9 @@ def test_insert_rest_required_fields(request_type=compute.InsertVpnTunnelRequest response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2159,38 +2240,6 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["vpn_tunnel_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "detailed_status": "detailed_status_value", - "id": 205, - "ike_version": 1182, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "local_traffic_selector": [ - "local_traffic_selector_value1", - "local_traffic_selector_value2", - ], - "name": "name_value", - "peer_external_gateway": "peer_external_gateway_value", - "peer_external_gateway_interface": 3279, - "peer_gcp_gateway": "peer_gcp_gateway_value", - "peer_ip": "peer_ip_value", - "region": "region_value", - "remote_traffic_selector": [ - "remote_traffic_selector_value1", - "remote_traffic_selector_value2", - ], - "router": "router_value", - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "shared_secret_hash": "shared_secret_hash_value", - "status": "status_value", - "target_vpn_gateway": "target_vpn_gateway_value", - "vpn_gateway": "vpn_gateway_value", - "vpn_gateway_interface": 2229, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2232,8 +2281,9 @@ def test_insert_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2322,6 +2372,73 @@ def test_insert_unary_rest(request_type): "vpn_gateway": "vpn_gateway_value", "vpn_gateway_interface": 2229, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertVpnTunnelRequest.meta.fields["vpn_tunnel_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["vpn_tunnel_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpn_tunnel_resource"][field])): + del request_init["vpn_tunnel_resource"][field][i][subfield] + else: + del request_init["vpn_tunnel_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2355,8 +2472,9 @@ def test_insert_unary_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2435,8 +2553,9 @@ def test_insert_unary_rest_required_fields(request_type=compute.InsertVpnTunnelR response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2529,38 +2648,6 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["vpn_tunnel_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "detailed_status": "detailed_status_value", - "id": 205, - "ike_version": 1182, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "local_traffic_selector": [ - "local_traffic_selector_value1", - "local_traffic_selector_value2", - ], - "name": "name_value", - "peer_external_gateway": "peer_external_gateway_value", - "peer_external_gateway_interface": 3279, - "peer_gcp_gateway": "peer_gcp_gateway_value", - "peer_ip": "peer_ip_value", - "region": "region_value", - "remote_traffic_selector": [ - "remote_traffic_selector_value1", - "remote_traffic_selector_value2", - ], - "router": "router_value", - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "shared_secret_hash": "shared_secret_hash_value", - "status": "status_value", - "target_vpn_gateway": "target_vpn_gateway_value", - "vpn_gateway": "vpn_gateway_value", - "vpn_gateway_interface": 2229, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
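The *_rest_bad_request hunks above can drop the resource body entirely because nothing in those tests ever serializes a return value: the mocked HTTP call simply answers with an error status and the client is expected to raise. A rough sketch of that shape, under the same transport-session assumption as before; the empty JSON error body, the Request() placeholder, and the chosen exception class are illustrative.

    import pytest
    from unittest import mock
    from requests import Request, Response
    from google.api_core import exceptions as core_exceptions
    from google.auth import credentials as ga_credentials
    from google.cloud import compute_v1

    client = compute_v1.VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials()
    )

    response_value = Response()
    response_value.status_code = 400
    response_value._content = b"{}"          # invented error payload
    response_value.request = Request()       # gives the error formatter something to read

    with mock.patch.object(type(client.transport._session), "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.GoogleAPIError):
            client.insert(
                project="p",
                region="r",
                vpn_tunnel_resource=compute_v1.VpnTunnel(name="tunnel-1"),
            )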
@@ -2602,8 +2689,9 @@ def test_insert_unary_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2675,8 +2763,9 @@ def test_list_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2766,8 +2855,9 @@ def test_list_rest_required_fields(request_type=compute.ListVpnTunnelsRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2907,8 +2997,9 @@ def test_list_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.VpnTunnelList.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3021,6 +3112,81 @@ def test_set_labels_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsVpnTunnelRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
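The pruning has to happen before this request_type(**request_init) call: proto-plus coerces nested dictionaries into the corresponding message types, so a key that the installed version no longer defines would make construction fail rather than being silently ignored. A short sketch with the same sample values used above:

    from google.cloud.compute_v1.types import compute

    request_init = {
        "project": "sample1",
        "region": "sample2",
        "resource": "sample3",
        "region_set_labels_request_resource": {
            "label_fingerprint": "label_fingerprint_value",
            "labels": {},
        },
    }
    # The nested dict is converted to RegionSetLabelsRequest; only keys the runtime
    # message defines are allowed here, which is what the pruning guarantees.
    request = compute.SetLabelsVpnTunnelRequest(**request_init)
    assert (
        request.region_set_labels_request_resource.label_fingerprint
        == "label_fingerprint_value"
    )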
@@ -3054,8 +3220,9 @@ def test_set_labels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3162,8 +3329,9 @@ def test_set_labels_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3259,10 +3427,6 @@ def test_set_labels_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_labels_request_resource"] = { - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3309,8 +3473,9 @@ def test_set_labels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3372,6 +3537,81 @@ def test_set_labels_unary_rest(request_type): "label_fingerprint": "label_fingerprint_value", "labels": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsVpnTunnelRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3405,8 +3645,9 @@ def test_set_labels_unary_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -3491,8 +3732,9 @@ def test_set_labels_unary_rest_required_fields(
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.Operation.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -3588,10 +3830,6 @@ def test_set_labels_unary_rest_bad_request(
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"}
-    request_init["region_set_labels_request_resource"] = {
-        "label_fingerprint": "label_fingerprint_value",
-        "labels": {},
-    }
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a BadRequest error.
@@ -3638,8 +3876,9 @@ def test_set_labels_unary_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
diff --git a/tests/unit/gapic/compute_v1/test_zone_operations.py b/tests/unit/gapic/compute_v1/test_zone_operations.py
index 5e60120a..b5222899 100644
--- a/tests/unit/gapic/compute_v1/test_zone_operations.py
+++ b/tests/unit/gapic/compute_v1/test_zone_operations.py
@@ -576,8 +576,9 @@ def test_delete_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.DeleteZoneOperationResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -657,8 +658,9 @@ def test_delete_rest_required_fields(request_type=compute.DeleteZoneOperationReq
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.DeleteZoneOperationResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -798,8 +800,9 @@ def test_delete_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.DeleteZoneOperationResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -887,8 +890,9 @@ def test_get_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -990,8 +994,9 @@ def test_get_rest_required_fields(request_type=compute.GetZoneOperationRequest):
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.Operation.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -1129,8 +1134,9 @@ def test_get_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -1200,8 +1206,9 @@ def test_list_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.OperationList.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.OperationList.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -1291,8 +1298,9 @@ def test_list_rest_required_fields(request_type=compute.ListZoneOperationsReques
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.OperationList.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.OperationList.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -1434,8 +1442,9 @@ def test_list_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.OperationList.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.OperationList.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -1577,8 +1586,9 @@ def test_wait_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -1680,8 +1690,9 @@ def test_wait_rest_required_fields(request_type=compute.WaitZoneOperationRequest
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.Operation.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -1819,8 +1830,9 @@ def test_wait_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Operation.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
diff --git a/tests/unit/gapic/compute_v1/test_zones.py b/tests/unit/gapic/compute_v1/test_zones.py
index 53a73498..caab78ed 100644
--- a/tests/unit/gapic/compute_v1/test_zones.py
+++ b/tests/unit/gapic/compute_v1/test_zones.py
@@ -567,8 +567,9 @@ def test_get_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Zone.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Zone.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -654,8 +655,9 @@ def test_get_rest_required_fields(request_type=compute.GetZoneRequest):
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.Zone.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.Zone.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -783,8 +785,9 @@ def test_get_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.Zone.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.Zone.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -852,8 +855,9 @@ def test_list_rest(request_type):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.ZoneList.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.ZoneList.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
@@ -939,8 +943,9 @@ def test_list_rest_required_fields(request_type=compute.ListZonesRequest):
            response_value = Response()
            response_value.status_code = 200
-            pb_return_value = compute.ZoneList.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
+            # Convert return value to protobuf type
+            return_value = compute.ZoneList.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
@@ -1070,8 +1075,9 @@ def test_list_rest_flattened():
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
-        pb_return_value = compute.ZoneList.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+        # Convert return value to protobuf type
+        return_value = compute.ZoneList.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
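# --- Illustrative aside (not part of the upstream diff) ----------------------
# The recurring change in these hunks drops the intermediate `pb_return_value`
# name and reuses `return_value`: the proto-plus response is converted to its
# underlying protobuf message with the class-level `.pb()` helper, because
# json_format.MessageToJson() only accepts vanilla protobuf messages. Below is
# a minimal, self-contained sketch of that mocking pattern; compute.Zone is
# just one of the response types touched here, chosen for illustration.
from google.cloud.compute_v1.types import compute
from google.protobuf import json_format
from requests import Response

return_value = compute.Zone(name="us-central1-a")

response_value = Response()
response_value.status_code = 200
# Convert the proto-plus wrapper to its protobuf counterpart before serializing.
return_value = compute.Zone.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
# `response_value` can now stand in for the mocked transport session's response.
# ------------------------------------------------------------------------------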